Update app.py
app.py CHANGED
@@ -298,7 +298,7 @@ def expand_prompt(prompt):
     outputs = model.generate(
         input_ids=input_ids,
         attention_mask=attention_mask,
-        max_new_tokens=
+        max_new_tokens=384,
         temperature=0.2,
         top_p=0.9,
         do_sample=True,
@@ -312,7 +312,7 @@ def expand_prompt(prompt):
     outputs_2 = model.generate(
         input_ids=input_ids_2,
         attention_mask=attention_mask_2,
-        max_new_tokens=
+        max_new_tokens=512,
         temperature=0.2,
         top_p=0.9,
         do_sample=True,
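
For context, both hunks only change the max_new_tokens cap passed to model.generate (384 for the first call, 512 for the second); the sampling settings stay the same. The snippet below is a minimal, self-contained sketch of that call pattern, assuming a Hugging Face transformers causal LM. The model name, prompt, and surrounding setup are placeholders and are not taken from this Space's app.py.

import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

model_name = "gpt2"  # placeholder; the Space's actual model is not shown in this diff
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForCausalLM.from_pretrained(model_name)

prompt = "Expand this prompt: a cat sitting on a windowsill"
inputs = tokenizer(prompt, return_tensors="pt")

with torch.no_grad():
    outputs = model.generate(
        input_ids=inputs["input_ids"],
        attention_mask=inputs["attention_mask"],
        max_new_tokens=384,   # ceiling on generated tokens, as set in the first hunk
        temperature=0.2,      # low temperature keeps sampling close to greedy
        top_p=0.9,            # nucleus sampling cutoff
        do_sample=True,
        pad_token_id=tokenizer.eos_token_id,  # avoids a warning for models without a pad token
    )

print(tokenizer.decode(outputs[0], skip_special_tokens=True))

Raising max_new_tokens only raises the ceiling on output length; with do_sample=True, temperature=0.2 and top_p=0.9 still control how tokens are sampled.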