Update app.py
app.py CHANGED

@@ -13,6 +13,7 @@ from langchain.chains import ConversationalRetrievalChain
 from htmlTemplates import css, bot_template, user_template
 from langchain.llms import HuggingFaceHub
 from langchain.vectorstores import Chroma
+from gpt4all import GPT4All
 
 
 # set this key as an environment variable
@@ -148,10 +149,8 @@ def get_conversation_chain(vectorstore:FAISS) -> ConversationalRetrievalChain:
     # repo_id="mistralai/Mixtral-8x7B-Instruct-v0.1",
     # model_kwargs={"temperature": 0.5, "max_new_tokens": 1024, "max_length": 1048, "top_k": 3, "trust_remote_code": True, "torch_dtype": "auto"},
     #)
-
-
-        model_kwargs={"temperature": 0.5, "max_new_tokens": 1024, "max_length": 1048, "top_k": 3, "trust_remote_code": True, "torch_dtype": "auto"},
-    )
+
+    llm = GPT4All("TheBloke/Orca-2-13B-GGUF")
     # llm = ChatOpenAI(temperature=0, model="gpt-3.5-turbo-0613")
 
 
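The second hunk drops the leftover model_kwargs arguments from the commented-out HuggingFaceHub call and instantiates a local GPT4All model instead. The rest of get_conversation_chain is not shown in this diff, so the following is only a minimal sketch of how a locally downloaded GGUF model is typically wired into ConversationalRetrievalChain, using LangChain's own GPT4All wrapper (langchain.llms.GPT4All, which takes a path to a .gguf file rather than a Hub repo id); the model path and memory setup here are assumptions for illustration, not part of this commit.

# Minimal sketch (not the committed code): a local GGUF model driving the
# retrieval chain through LangChain's GPT4All wrapper.
from langchain.llms import GPT4All
from langchain.memory import ConversationBufferMemory
from langchain.chains import ConversationalRetrievalChain
from langchain.vectorstores import FAISS


def get_conversation_chain(vectorstore: FAISS) -> ConversationalRetrievalChain:
    # Assumed local path to a GGUF file downloaded beforehand (e.g. one of the
    # quantized files from the TheBloke/Orca-2-13B-GGUF repo).
    llm = GPT4All(model="./models/orca-2-13b.Q4_K_M.gguf")

    # Conversation memory keyed the way ConversationalRetrievalChain expects.
    memory = ConversationBufferMemory(memory_key="chat_history", return_messages=True)

    return ConversationalRetrievalChain.from_llm(
        llm=llm,
        retriever=vectorstore.as_retriever(),
        memory=memory,
    )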