JPLTedCas committed on
Commit
a2a898e
·
verified ·
1 Parent(s): d10d5d9

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +3 -4
app.py CHANGED
@@ -13,6 +13,7 @@ from langchain.chains import ConversationalRetrievalChain
13
  from htmlTemplates import css, bot_template, user_template
14
  from langchain.llms import HuggingFaceHub
15
  from langchain.vectorstores import Chroma
 
16
 
17
 
18
  # set this key as an environment variable
@@ -148,10 +149,8 @@ def get_conversation_chain(vectorstore:FAISS) -> ConversationalRetrievalChain:
148
  # repo_id="mistralai/Mixtral-8x7B-Instruct-v0.1",
149
  # model_kwargs={"temperature": 0.5, "max_new_tokens": 1024, "max_length": 1048, "top_k": 3, "trust_remote_code": True, "torch_dtype": "auto"},
150
  #)
151
- llm = HuggingFaceHub(
152
- repo_id="TheBloke/Orca-2-13B-GGUF",
153
- model_kwargs={"temperature": 0.5, "max_new_tokens": 1024, "max_length": 1048, "top_k": 3, "trust_remote_code": True, "torch_dtype": "auto"},
154
- )
155
  # llm = ChatOpenAI(temperature=0, model="gpt-3.5-turbo-0613")
156
 
157
 
 
13
  from htmlTemplates import css, bot_template, user_template
14
  from langchain.llms import HuggingFaceHub
15
  from langchain.vectorstores import Chroma
16
+ from gpt4all import GPT4All
17
 
18
 
19
  # set this key as an environment variable
 
149
  # repo_id="mistralai/Mixtral-8x7B-Instruct-v0.1",
150
  # model_kwargs={"temperature": 0.5, "max_new_tokens": 1024, "max_length": 1048, "top_k": 3, "trust_remote_code": True, "torch_dtype": "auto"},
151
  #)
152
+
153
+ llm = GPT4All("TheBloke/Orca-2-13B-GGUF")
 
 
154
  # llm = ChatOpenAI(temperature=0, model="gpt-3.5-turbo-0613")
155
 
156