Update app.py
app.py CHANGED
@@ -9,8 +9,11 @@ from langchain.memory import ConversationBufferMemory
 from langchain.chains import ConversationalRetrievalChain
 from htmlTemplates import css, bot_template, user_template
 from langchain.llms import HuggingFaceHub
+import os
 # from langchain.callbacks import get_openai_callback
 
+hub_token = os.environ["HUGGINGFACEHUB_TOKEN"]
+
 def get_pdf_text(pdf_docs):
     text = ""
     for pdf in pdf_docs:
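The new module-level lookup uses os.environ[...], which raises a bare KeyError when the secret is missing. A minimal sketch of a friendlier variant, assuming the token is exposed to the Space as an environment variable named HUGGINGFACEHUB_TOKEN (the name used in the diff above):

import os

def get_hub_token():
    # Assumes the secret is exposed as the HUGGINGFACEHUB_TOKEN environment
    # variable, matching the name used in the commit above.
    token = os.environ.get("HUGGINGFACEHUB_TOKEN")
    if not token:
        raise RuntimeError(
            "HUGGINGFACEHUB_TOKEN is not set; add it as a secret in the Space "
            "settings (or export it locally) before starting the app.")
    return token

hub_token = get_hub_token()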
@@ -41,7 +44,7 @@ def get_vectorstore(text_chunks):
 
 def get_conversation_chain(vectorstore):
     # llm = ChatOpenAI(model_name="gpt-3.5-turbo-16k")
-    llm = HuggingFaceHub(repo_id="google/flan-t5-xxl", model_kwargs={"temperature":0.5, "max_length":512})
+    llm = HuggingFaceHub(repo_id="google/flan-t5-xxl", huggingfacehub_api_token=hub_token, model_kwargs={"temperature":0.5, "max_length":512})
 
     memory = ConversationBufferMemory(
         memory_key='chat_history', return_messages=True)
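The diff ends at the memory setup, so the rest of get_conversation_chain is not shown. Below is a minimal sketch of how the pieces above presumably fit together, using the ConversationalRetrievalChain import already at the top of app.py; the retriever wiring and the return value are assumptions, not part of this commit.

import os
from langchain.llms import HuggingFaceHub
from langchain.memory import ConversationBufferMemory
from langchain.chains import ConversationalRetrievalChain

hub_token = os.environ["HUGGINGFACEHUB_TOKEN"]

def get_conversation_chain(vectorstore):
    # llm = ChatOpenAI(model_name="gpt-3.5-turbo-16k")
    llm = HuggingFaceHub(
        repo_id="google/flan-t5-xxl",
        huggingfacehub_api_token=hub_token,
        model_kwargs={"temperature": 0.5, "max_length": 512},
    )
    memory = ConversationBufferMemory(
        memory_key="chat_history", return_messages=True)
    # Assumed ending: combine the LLM, the vector store's retriever, and the
    # chat memory into a single retrieval chain and hand it back to the caller.
    conversation_chain = ConversationalRetrievalChain.from_llm(
        llm=llm,
        retriever=vectorstore.as_retriever(),
        memory=memory,
    )
    return conversation_chain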