Spaces: Running on Zero
Update app.py
app.py
CHANGED
@@ -224,7 +224,7 @@ def llm_call(question_prompt, model_name,
                        top_p=1, n_samples=64, stop=None):
     if HUGGINGFACE:
         model_inputs = hug_tokenizer([question_prompt], return_tensors="pt").to('cuda')
-        generated_ids = hug_model.generate(**model_inputs, max_length=1400, temperature=1, num_return_sequences=
+        generated_ids = hug_model.generate(**model_inputs, max_length=1400, temperature=1, num_return_sequences=10, do_sample=True)
         responses = hug_tokenizer.batch_decode(generated_ids, skip_special_tokens=True)
         codes = []
         for response in responses:
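The updated call turns on sampling so that the ten requested sequences are actually distinct candidates rather than repeats of a single greedy decode. Below is a minimal, self-contained sketch of the same generate pattern; the "gpt2" checkpoint and toy prompt are placeholders, not the app's hug_model, hug_tokenizer, or question_prompt.

# Minimal sketch of the updated generate call, assuming a small placeholder
# checkpoint ("gpt2") and a toy prompt; not the app's actual model or prompt.
from transformers import AutoModelForCausalLM, AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("gpt2")
model = AutoModelForCausalLM.from_pretrained("gpt2")

model_inputs = tokenizer(["def add(a, b):"], return_tensors="pt")

# do_sample=True is what makes num_return_sequences=10 meaningful: under greedy
# decoding the returned sequences would not be distinct samples, and recent
# transformers versions reject num_return_sequences > 1 without sampling or beams.
generated_ids = model.generate(
    **model_inputs,
    max_length=64,           # shortened from 1400 for this sketch
    temperature=1.0,
    do_sample=True,
    num_return_sequences=10,
)
responses = tokenizer.batch_decode(generated_ids, skip_special_tokens=True)
print(len(responses))  # 10 sampled completions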