AminFaraji committed on
Commit
33e1e9e
·
verified ·
1 Parent(s): 1af7717

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +3 -6
app.py CHANGED
@@ -225,10 +225,10 @@ def get_llama_response(message):
225
  AI:""".strip()
226
 
227
 
228
- prompt = PromptTemplate(input_variables=["history", "input"], template=template+'ss'+ s)
229
 
230
- #print(template)
231
- inputs = tokenizer(query_text, return_tensors="pt").to("cuda")
232
 
233
  # Generate text
234
  with torch.no_grad():
@@ -236,9 +236,6 @@ def get_llama_response(message):
236
 
237
  # Decode the generated text
238
  generated_text = tokenizer.decode(outputs[0], skip_special_tokens=True)
239
-
240
- #chain.prompt=prompt
241
- #res = chain(query_text)
242
  return(generated_text)
243
 
244
  import gradio as gr
 
225
  AI:""".strip()
226
 
227
 
228
+ input_text = query_text
229
 
230
+ # Tokenize the input text
231
+ inputs = tokenizer(input_text, return_tensors="pt").to("cuda")
232
 
233
  # Generate text
234
  with torch.no_grad():
 
236
 
237
  # Decode the generated text
238
  generated_text = tokenizer.decode(outputs[0], skip_special_tokens=True)
 
 
 
239
  return(generated_text)
240
 
241
  import gradio as gr