Update app.py
app.py CHANGED
@@ -21,8 +21,8 @@ tokenizer.pad_token_id = tokenizer.eos_token_id
 
 def generate_answer(question):
     #inputs = tokenizer.apply_chat_template(messages, tokenize=True, add_generation_prompt=True, return_tensors="pt")
-    inputs = tokenizer
-    outputs = model.generate(inputs, max_length=2000, num_return_sequences=1, do_sample=True)
+    inputs = tokenizer(question, return_tensors="pt")
+    outputs = model.generate(**inputs, max_length=2000, num_return_sequences=1, do_sample=True)
     answer = tokenizer.decode(outputs[0], skip_special_tokens=True)
     return answer
 
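For context, a minimal runnable sketch of the corrected generate_answer function. The model checkpoint name below is a placeholder (the checkpoint actually loaded by this Space is not shown in the hunk), and max_length is capped because the placeholder model only supports 1024 positions, whereas the Space passes 2000.

from transformers import AutoModelForCausalLM, AutoTokenizer

# Placeholder checkpoint; the Space's real model is not visible in this diff.
model_name = "gpt2"
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForCausalLM.from_pretrained(model_name)
tokenizer.pad_token_id = tokenizer.eos_token_id  # matches the context line in the hunk header

def generate_answer(question):
    # The fix: call the tokenizer on the question to get input tensors,
    # instead of assigning the tokenizer object itself to `inputs`.
    inputs = tokenizer(question, return_tensors="pt")
    # Unpack input_ids / attention_mask into generate(); max_length reduced here
    # for the small placeholder model (the Space uses max_length=2000).
    outputs = model.generate(**inputs, max_length=512, num_return_sequences=1, do_sample=True)
    answer = tokenizer.decode(outputs[0], skip_special_tokens=True)
    return answer

if __name__ == "__main__":
    print(generate_answer("What does the tokenizer return when called on a string?"))

Calling the tokenizer on the string returns a dict-like BatchEncoding with input_ids and attention_mask tensors, which is why the generate call unpacks it with **inputs rather than passing the tokenizer object directly.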