Update app.py
app.py
CHANGED
@@ -197,7 +197,8 @@ chain = ConversationChain(
     verbose=True,
 )
 
-
+!pip install langchain==0.0.233
+from langchain.chains.conversation.memory import ConversationBufferWindowMemory
 def get_llama_response(message: str, history: list) -> str:
     query_text = message
 
@@ -224,11 +225,9 @@ def get_llama_response(message: str, history: list) -> str:
 
     #print(template)
     chain.prompt=prompt
-
-
-
-    response = tokenizer.decode(outputs[0], skip_special_tokens=True)
-    return(response)
+    res = chain(query_text)
+
+    return(res['response'])
 
 import gradio as gr
 
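Taken together, the two hunks route the user message through the LangChain ConversationChain instead of decoding tokenizer output directly. The sketch below reconstructs the updated flow under stated assumptions: it pins langchain 0.0.233 as in the diff, substitutes a FakeListLLM for the Space's Llama model so the snippet runs on its own, and the prompt template plus the gr.ChatInterface wiring are guesses about the rest of app.py, not part of this commit.

# Sketch of the handler after this commit (assumptions noted inline).
from langchain.chains import ConversationChain
from langchain.chains.conversation.memory import ConversationBufferWindowMemory
from langchain.llms.fake import FakeListLLM
from langchain.prompts import PromptTemplate
import gradio as gr

# Stand-in for the Space's Llama model; the real app builds its LLM from the
# transformers setup earlier in app.py.
llm = FakeListLLM(responses=["Hi! (canned reply)"] * 10)

chain = ConversationChain(
    llm=llm,
    memory=ConversationBufferWindowMemory(k=4),  # window size is an assumption
    verbose=True,
)

# Hypothetical template; the real `prompt` is built from `template` upstream.
prompt = PromptTemplate(
    input_variables=["history", "input"],
    template="Conversation so far:\n{history}\nHuman: {input}\nAssistant:",
)

def get_llama_response(message: str, history: list) -> str:
    query_text = message
    chain.prompt = prompt      # swap in the prompt built for this turn
    res = chain(query_text)    # ConversationChain returns a dict
    return res["response"]     # generated text lives under the "response" key

# Wiring implied by `import gradio as gr`; the ChatInterface call is assumed.
gr.ChatInterface(get_llama_response).launch()

One practical note: `!pip install langchain==0.0.233` is notebook/IPython syntax, so in a Space the version pin normally belongs in requirements.txt rather than in app.py itself.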