Commit bc693f4
Parent(s): c6bebaa
increase max_length vicuna

app.py CHANGED
@@ -60,7 +60,7 @@ def vicuna_respond(tab_name, message, chat_history):
     print('Vicuna - Prompt + Context:')
     print(formatted_prompt)
     input_ids = vicuna_tokenizer.encode(formatted_prompt, return_tensors="pt")
-    output_ids = vicuna_model.generate(input_ids, do_sample=True, max_length=
+    output_ids = vicuna_model.generate(input_ids, do_sample=True, max_length=100, num_beams=5, no_repeat_ngram_size=2)
     bot_message = vicuna_tokenizer.decode(output_ids[0], skip_special_tokens=True)
     # Remove formatted prompt from bot_message
     bot_message = bot_message.replace(formatted_prompt, '')
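
For context, below is a minimal standalone sketch of the updated generation call, assuming vicuna_model and vicuna_tokenizer are a Hugging Face transformers causal LM and tokenizer loaded with AutoModelForCausalLM/AutoTokenizer; the checkpoint name and prompt are hypothetical, since the actual weights and prompt formatting used by this Space are not shown in the diff.

    from transformers import AutoModelForCausalLM, AutoTokenizer

    model_name = "lmsys/vicuna-7b-v1.5"  # hypothetical checkpoint, for illustration only
    vicuna_tokenizer = AutoTokenizer.from_pretrained(model_name)
    vicuna_model = AutoModelForCausalLM.from_pretrained(model_name)

    formatted_prompt = "What is the capital of France?"  # placeholder prompt
    input_ids = vicuna_tokenizer.encode(formatted_prompt, return_tensors="pt")

    # do_sample=True samples from the token distribution, num_beams=5 keeps 5 beam
    # hypotheses, no_repeat_ngram_size=2 blocks any 2-gram from repeating, and
    # max_length=100 caps the total sequence length (prompt + generated tokens).
    output_ids = vicuna_model.generate(
        input_ids,
        do_sample=True,
        max_length=100,
        num_beams=5,
        no_repeat_ngram_size=2,
    )

    bot_message = vicuna_tokenizer.decode(output_ids[0], skip_special_tokens=True)
    # Strip the echoed prompt so only the model's reply remains, as app.py does.
    bot_message = bot_message.replace(formatted_prompt, "")
    print(bot_message)

Note that max_length counts the prompt tokens as well, so a long formatted_prompt leaves correspondingly fewer tokens for the reply; max_new_tokens is the usual alternative when only the reply length should be capped.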