gli-mrunal committed on
Commit
b1ecb1f
·
1 Parent(s): 5e1edfd

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +4 -2
app.py CHANGED
@@ -23,7 +23,8 @@ from langchain.text_splitter import CharacterTextSplitter
23
  from langchain.embeddings import OpenAIEmbeddings, HuggingFaceInstructEmbeddings
24
  from langchain.vectorstores import FAISS # FAISS instead of PineCone
25
  # from langchain.llms import OpenAI
26
- from langchain.chat_models import ChatOpenAI
 
27
  from langchain.memory import ConversationBufferMemory
28
  from langchain.chains import ConversationalRetrievalChain
29
  from htmlTemplates import css, bot_template, user_template
@@ -54,7 +55,8 @@ def get_vectorstore(text_chunks):
54
 
55
  def get_conversation_chain(vectorstore):
56
  #llm = OpenAI()
57
- llm = ChatOpenAI()
 
58
  memory = ConversationBufferMemory(memory_key='chat_history', return_messages=True)
59
  conversation_chain = ConversationalRetrievalChain.from_llm(
60
  llm=llm,
 
23
  from langchain.embeddings import OpenAIEmbeddings, HuggingFaceInstructEmbeddings
24
  from langchain.vectorstores import FAISS # FAISS instead of PineCone
25
  # from langchain.llms import OpenAI
26
+ from langchain.llms import HuggingFaceHub
27
+ #from langchain.chat_models import ChatOpenAI
28
  from langchain.memory import ConversationBufferMemory
29
  from langchain.chains import ConversationalRetrievalChain
30
  from htmlTemplates import css, bot_template, user_template
 
55
 
56
  def get_conversation_chain(vectorstore):
57
  #llm = OpenAI()
58
+ #llm = ChatOpenAI()
59
+ llm = HuggingFaceHub(repo_id="google/flan-t5-xxl", model_kwargs={"temperature":0.5, "max_length":512})
60
  memory = ConversationBufferMemory(memory_key='chat_history', return_messages=True)
61
  conversation_chain = ConversationalRetrievalChain.from_llm(
62
  llm=llm,