yashasgupta committed
Commit 1501391 · verified · 1 Parent(s): 8aeec65

Update app.py

Files changed (1)
  1. app.py +6 -8
app.py CHANGED
@@ -1,7 +1,7 @@
 import streamlit as st
 from langchain_core.messages import HumanMessage, AIMessage, SystemMessage
 from langchain_core.prompts import ChatPromptTemplate, SystemMessagePromptTemplate, HumanMessagePromptTemplate
-
+import os
 
 st.title(':blue[Langchain:] A Rag System on “Leave No Context Behind” Paper')
 st.header("AI Chatbot :robot_face:")
@@ -29,8 +29,7 @@ chat_template = ChatPromptTemplate.from_messages([
 
 from langchain_google_genai import ChatGoogleGenerativeAI
 
-chat_model = ChatGoogleGenerativeAI(google_api_key=KEY,
-                                    model="gemini-1.5-pro-latest")
+chat_model = ChatGoogleGenerativeAI(model="gemini-1.5-pro-latest")
 
 from langchain_core.output_parsers import StrOutputParser
 
@@ -38,8 +37,8 @@ output_parser = StrOutputParser()
 
 chain = chat_template | chat_model | output_parser
 
-from langchain_community.document_loaders import PDFMinerLoader # type: ignore
-dat = PDFMinerLoader(r"D:\Langchain\rag_system\2404.07143.pdf")
+from langchain_community.document_loaders import PDFMinerLoader
+dat = PDFMinerLoader(r"2404.07143.pdf")
 dat_nik =dat.load()
 # Split the document into chunks
 
@@ -52,12 +51,11 @@ chunks = text_splitter.split_documents(dat_nik)
 # We are just loading OpenAIEmbeddings
 from langchain_google_genai import GoogleGenerativeAIEmbeddings # type: ignore
 
-embedding_model = GoogleGenerativeAIEmbeddings(google_api_key=KEY,
-                                               model="models/embedding-001")
+embedding_model = GoogleGenerativeAIEmbeddings(model="models/embedding-001")
 
 # vectors = embeddings.embed_documents(chunks)
 # Store the chunks in vector store
-from langchain_community.vectorstores import Chroma # type: ignore
+from langchain_community.vectorstores import Chroma
 
 # Creating a New Chroma Database
 db = Chroma.from_documents(chunks, embedding_model, persist_directory="./chroma_db_1")
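The substantive part of this update is that both Google clients lose their hard-coded google_api_key=KEY argument while import os is added, so the key presumably now reaches ChatGoogleGenerativeAI and GoogleGenerativeAIEmbeddings through the environment (both constructors fall back to the GOOGLE_API_KEY environment variable when no key is passed explicitly). A minimal sketch of that pattern, assuming the key is stored as a Space/Streamlit secret named GOOGLE_API_KEY; none of the lines below are from the commit itself:

import os
import streamlit as st
from langchain_google_genai import ChatGoogleGenerativeAI, GoogleGenerativeAIEmbeddings

# On a Hugging Face Space a secret is normally exposed as an environment
# variable already; fall back to Streamlit's secrets store if it is not.
# (Illustrative only; the commit does not show where the key comes from.)
if "GOOGLE_API_KEY" not in os.environ:
    os.environ["GOOGLE_API_KEY"] = st.secrets["GOOGLE_API_KEY"]

# With GOOGLE_API_KEY set, no explicit google_api_key argument is needed.
chat_model = ChatGoogleGenerativeAI(model="gemini-1.5-pro-latest")
embedding_model = GoogleGenerativeAIEmbeddings(model="models/embedding-001")

Likewise, the loader path changes from the absolute D:\Langchain\rag_system\2404.07143.pdf to the relative 2404.07143.pdf, which assumes the paper's PDF sits next to app.py in the app's working directory.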