Update app.py
app.py CHANGED
@@ -202,12 +202,12 @@ def get_llama_response(message):
     query_text = message


-
+    results = db.similarity_search_with_relevance_scores(query_text, k=3)
     if len(results) == 0 or results[0][1] < 0.5:
         print(f"Unable to find matching results.")

-
-
+
+    context_text = "\n\n---\n\n".join([doc.page_content for doc, _score in results])

     template = """
     The following is a conversation between a human an AI. The AI acts exactly like Dwight K Schrute from the TV show The Office.
@@ -225,9 +225,18 @@ def get_llama_response(message):
     prompt = PromptTemplate(input_variables=["history", "input"], template=template+'ss'+ s)

     #print(template)
-
-
-
+    inputs = tokenizer(query_text, return_tensors="pt").to("cuda")
+
+    # Generate text
+    with torch.no_grad():
+        outputs = model.generate(inputs.input_ids, max_length=50)
+
+    # Decode the generated text
+    generated_text = tokenizer.decode(outputs[0], skip_special_tokens=True)
+
+    #chain.prompt=prompt
+    #res = chain(query_text)
+    return(generated_text)

 import gradio as gr

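For orientation, here is a minimal, self-contained sketch of how the updated get_llama_response() reads end to end after this commit. It assumes db is a LangChain Chroma vector store and that tokenizer and model are Hugging Face transformers objects loaded elsewhere in app.py; CHROMA_PATH, MODEL_NAME, and the embedding setup below are illustrative placeholders, not values taken from this commit.

import torch
from transformers import AutoModelForCausalLM, AutoTokenizer
from langchain_community.embeddings import HuggingFaceEmbeddings
from langchain_community.vectorstores import Chroma

# Assumed setup -- the real app.py builds its own db, tokenizer, and model.
CHROMA_PATH = "chroma"                          # hypothetical persist directory
MODEL_NAME = "meta-llama/Llama-2-7b-chat-hf"    # hypothetical model id

embeddings = HuggingFaceEmbeddings()
db = Chroma(persist_directory=CHROMA_PATH, embedding_function=embeddings)

tokenizer = AutoTokenizer.from_pretrained(MODEL_NAME)
model = AutoModelForCausalLM.from_pretrained(MODEL_NAME, torch_dtype=torch.float16).to("cuda")

def get_llama_response(message):
    query_text = message

    # Retrieve the 3 most relevant chunks along with their relevance scores.
    results = db.similarity_search_with_relevance_scores(query_text, k=3)
    if len(results) == 0 or results[0][1] < 0.5:
        print("Unable to find matching results.")

    # Join the retrieved chunks into one context string.
    context_text = "\n\n---\n\n".join([doc.page_content for doc, _score in results])

    # Tokenize the raw query and generate a reply with the Hugging Face model.
    inputs = tokenizer(query_text, return_tensors="pt").to("cuda")
    with torch.no_grad():
        outputs = model.generate(inputs.input_ids, max_length=50)

    # Decode the generated token ids back into text.
    generated_text = tokenizer.decode(outputs[0], skip_special_tokens=True)
    return generated_text

Note that in the committed code the retrieved context_text and the PromptTemplate are built but never passed to model.generate, which sees only the raw query; the LangChain chain path (chain.prompt / chain(query_text)) remains commented out.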