Update app.py
app.py CHANGED
@@ -10,10 +10,10 @@ from langchain.chains import ConversationalRetrievalChain
 from htmlTemplates import css, bot_template, user_template
 from langchain.llms import HuggingFaceHub, LlamaCpp
 from huggingface_hub import snapshot_download, hf_hub_download
-from prompts import CONDENSE_QUESTION_PROMPT
+#from prompts import CONDENSE_QUESTION_PROMPT
 
-repo_name = "IlyaGusev/
-model_name = "model-
+repo_name = "IlyaGusev/saiga2_13b_gguf"
+model_name = "model-q4_K.gguf"
 
 snapshot_download(repo_id=repo_name, local_dir=".", allow_patterns=model_name)
 
@@ -67,7 +67,7 @@ def get_conversation_chain(vectorstore, model_name):
     memory = ConversationBufferMemory(memory_key='chat_history', return_messages=True)
 
     conversation_chain = ConversationalRetrievalChain.from_llm(llm=llm,
-                                                               condense_question_prompt=CONDENSE_QUESTION_PROMPT,
+                                                               #condense_question_prompt=CONDENSE_QUESTION_PROMPT,
                                                                retriever=vectorstore.as_retriever(),
                                                                memory=memory
                                                                )
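For context, a minimal sketch of how the pieces touched by this commit could fit together, assuming the downloaded GGUF file is loaded locally through LangChain's LlamaCpp wrapper; the n_ctx and temperature values below are illustrative assumptions, not values taken from app.py:

# Sketch only: wiring assumed from the imports and calls visible in this diff.
from huggingface_hub import snapshot_download
from langchain.llms import LlamaCpp
from langchain.memory import ConversationBufferMemory
from langchain.chains import ConversationalRetrievalChain

repo_name = "IlyaGusev/saiga2_13b_gguf"
model_name = "model-q4_K.gguf"

# Download only the quantized GGUF weights into the working directory.
snapshot_download(repo_id=repo_name, local_dir=".", allow_patterns=model_name)

def get_conversation_chain(vectorstore, model_name):
    # Load the local GGUF file via llama.cpp; n_ctx/temperature are assumed settings.
    llm = LlamaCpp(model_path=model_name, n_ctx=2048, temperature=0.1)
    memory = ConversationBufferMemory(memory_key='chat_history', return_messages=True)
    # condense_question_prompt is commented out by this commit, so the chain
    # falls back to LangChain's built-in question-condensing prompt.
    conversation_chain = ConversationalRetrievalChain.from_llm(llm=llm,
                                                               retriever=vectorstore.as_retriever(),
                                                               memory=memory
                                                               )
    return conversation_chain

With the custom CONDENSE_QUESTION_PROMPT disabled, follow-up questions are rephrased using LangChain's default (English) condense prompt rather than the project's own prompt from prompts.py.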