updated embeddings
app.py
CHANGED
@@ -13,12 +13,7 @@ from langchain_community.vectorstores.faiss import FAISS
 import time
 from PyPDF2 import PdfReader
 import tempfile
-from dotenv import load_dotenv
-load_dotenv()
 
-## Load the Groq API key
-# groq_api_key = os.getenv('GROQ_API_KEY')
-# google_api_key = os.getenv('GOOGLE_API_KEY')
 
 st.title("Ask your questions from pdf(s) or website")
 option = None
@@ -60,7 +55,7 @@ def llm_model():
 print("Response time :", time.process_time()-start)
 st.write(response['answer'])
 
-st.session_state.embeddings =GoogleGenerativeAIEmbeddings(model = 'models/embedding-001')
+st.session_state.embeddings = GoogleGenerativeAIEmbeddings(model = 'models/embedding-001')
 st.session_state.text_splitter = RecursiveCharacterTextSplitter(chunk_size =1000, chunk_overlap= 200)
 
 if option:
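For context, a minimal sketch of how the embeddings and splitter touched by this commit typically feed a FAISS index. The `raw_text` placeholder and the similarity-search call are assumptions for illustration, not part of the diff; since `load_dotenv()` was removed, the sketch also assumes GOOGLE_API_KEY is already set in the environment (or supplied some other way, e.g. Streamlit secrets).

from langchain_google_genai import GoogleGenerativeAIEmbeddings
from langchain.text_splitter import RecursiveCharacterTextSplitter
from langchain_community.vectorstores.faiss import FAISS

# Assumption: GOOGLE_API_KEY is exported in the environment, since the app
# no longer calls load_dotenv() after this commit.
raw_text = "..."  # placeholder for the text pulled out of the PDF(s) with PdfReader

# Same embedding model and splitter settings as in the diff above.
embeddings = GoogleGenerativeAIEmbeddings(model="models/embedding-001")
text_splitter = RecursiveCharacterTextSplitter(chunk_size=1000, chunk_overlap=200)

# Split the document into overlapping chunks and index them in FAISS.
chunks = text_splitter.split_text(raw_text)
vectorstore = FAISS.from_texts(chunks, embedding=embeddings)

# Retrieve the chunks most similar to a question before handing them to the LLM.
docs = vectorstore.similarity_search("What is this document about?", k=4)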