Update app.py
app.py CHANGED
@@ -13,7 +13,7 @@ def generate_response(input_text):
 
     output_sequences = model.generate(
         input_ids=input_ids,
-        max_length=
+        max_length=300,
         temperature=1.0,
         top_k=50,
         top_p=0.95,
@@ -25,10 +25,10 @@ def generate_response(input_text):
     response_with_prefix = tokenizer.decode(output_sequences[:, input_ids.shape[-1]:][0], skip_special_tokens=True)
     response_start_idx = response_with_prefix.find("answer: ")
     if response_start_idx != -1:
-
+
         response = response_with_prefix[response_start_idx + len("answer: "):]
     else:
-
+
         response = response_with_prefix
 
     return response
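
For context, a minimal, self-contained sketch of the generate_response flow this commit touches. The checkpoint name, the tokenizer.encode call, and do_sample=True are illustrative assumptions; the diff itself only confirms the generate arguments and the "answer: " stripping shown above.

from transformers import AutoModelForCausalLM, AutoTokenizer

# Placeholder checkpoint; the Space presumably loads its own fine-tuned model.
tokenizer = AutoTokenizer.from_pretrained("gpt2")
model = AutoModelForCausalLM.from_pretrained("gpt2")

def generate_response(input_text):
    # Encode the prompt; its length is needed later to slice the prompt back off.
    input_ids = tokenizer.encode(input_text, return_tensors="pt")

    output_sequences = model.generate(
        input_ids=input_ids,
        max_length=300,   # total length (prompt + completion), the value set in this commit
        temperature=1.0,
        top_k=50,
        top_p=0.95,
        do_sample=True,   # assumed: temperature/top_k/top_p only take effect when sampling
    )

    # Keep only the newly generated tokens, then strip everything up to "answer: ".
    response_with_prefix = tokenizer.decode(
        output_sequences[:, input_ids.shape[-1]:][0], skip_special_tokens=True
    )
    response_start_idx = response_with_prefix.find("answer: ")
    if response_start_idx != -1:
        response = response_with_prefix[response_start_idx + len("answer: "):]
    else:
        response = response_with_prefix

    return response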