Slider
app.py CHANGED
@@ -19,25 +19,25 @@ examples = [
 Q: A juggler can juggle 16 balls. Half of the balls are golf balls, and half of the golf balls are blue. How many blue golf balls are there?"""]
 ]
 
-title = "Upword
-description = "This demo compares [BART-Large-CNN](https://huggingface.co/facebook/bart-large-cnn) and [Flan-T5-XXL](https://huggingface.co/google/flan-t5-xxl)."
+title = "Upword. Model Playground"
 
 token = os.environ["token"]
 
 urls = {
     'flan-t5': os.environ["url"],
     # 'flan-t5': "https://api-inference.huggingface.co/models/philschmid/flan-t5-xxl-sharded-fp16",
-    'bart-large-cnn': "https://api-inference.huggingface.co/models/facebook/bart-large-cnn"
+    'bart-large-cnn': "https://api-inference.huggingface.co/models/facebook/bart-large-cnn",
+    'pegasus': "https://api-inference.huggingface.co/models/google/pegasus-xsum"
 }
 
 
-def inference(text):
+def inference(text, min_length, max_length):
     headers = {"Authorization": f"Bearer {token}"}
     payload = {
         "inputs": text,
         "parameters": {
-            "min_length":
-            "max_length":
+            "min_length": min_length,
+            "max_length": max_length,
             "do_sample": False
         }
     }
@@ -46,19 +46,24 @@ def inference(text):
         responses[model] = requests.post(url, headers=headers, json=payload)
 
     output_flan = responses['flan-t5'].json()[0]['generated_text']
-
-
+    output_bart = responses['bart-large-cnn'].json()[0]['summary_text']
+    output_gpt = responses['pegasus'].json()[0]['summary_text']
+    return [output_flan, output_bart, output_gpt]
 
 
 io = gr.Interface(
     inference,
-
+    inputs=[
+        gr.Textbox(label='Input', lines=3),
+        gr.Slider(minimum=1, maximum=160, value=20, label="min_length"),
+        gr.Slider(minimum=1, maximum=160, value=80, label="max_length")
+    ],
     outputs=[
         gr.Textbox(lines=3, label="Flan T5-XXL"),
-        gr.Textbox(lines=3, label="BART-Large-CNN")
+        gr.Textbox(lines=3, label="BART-Large-CNN"),
+        gr.Textbox(lines=3, label="Pegasus")
     ],
     title=title,
-    description=description,
     examples=examples
 )
 io.launch()
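For context, below is a minimal sketch of what app.py looks like after this commit. It is assembled only from the lines visible in the two hunks above; everything outside them (the imports, the full examples list, and the loop that posts the payload to each endpoint) is not shown in the diff, so those parts are plausible assumptions rather than the Space's exact code.

```python
import os

import requests
import gradio as gr

title = "Upword. Model Playground"

# API token and the private Flan-T5 endpoint URL are read from Space secrets,
# exactly as in the diff.
token = os.environ["token"]

urls = {
    'flan-t5': os.environ["url"],
    # 'flan-t5': "https://api-inference.huggingface.co/models/philschmid/flan-t5-xxl-sharded-fp16",
    'bart-large-cnn': "https://api-inference.huggingface.co/models/facebook/bart-large-cnn",
    'pegasus': "https://api-inference.huggingface.co/models/google/pegasus-xsum"
}

# The real examples list is defined above the first hunk; only the tail of one
# prompt is visible, so this single (text, min_length, max_length) example is
# illustrative.
examples = [
    ["Q: A juggler can juggle 16 balls. Half of the balls are golf balls, "
     "and half of the golf balls are blue. How many blue golf balls are there?",
     20, 80]
]


def inference(text, min_length, max_length):
    headers = {"Authorization": f"Bearer {token}"}
    payload = {
        "inputs": text,
        "parameters": {
            "min_length": min_length,
            "max_length": max_length,
            "do_sample": False
        }
    }

    # Assumed: the same payload is posted to every endpoint (this loop sits
    # between the two hunks and is not shown in the diff).
    responses = {}
    for model, url in urls.items():
        responses[model] = requests.post(url, headers=headers, json=payload)

    # Flan-T5 is a text2text-generation model, so the Inference API returns
    # 'generated_text'; BART and Pegasus are summarization models and return
    # 'summary_text'. 'output_gpt' keeps the variable name used in the diff
    # even though it holds the Pegasus output.
    output_flan = responses['flan-t5'].json()[0]['generated_text']
    output_bart = responses['bart-large-cnn'].json()[0]['summary_text']
    output_gpt = responses['pegasus'].json()[0]['summary_text']
    return [output_flan, output_bart, output_gpt]


io = gr.Interface(
    inference,
    inputs=[
        gr.Textbox(label='Input', lines=3),
        gr.Slider(minimum=1, maximum=160, value=20, label="min_length"),
        gr.Slider(minimum=1, maximum=160, value=80, label="max_length")
    ],
    outputs=[
        gr.Textbox(lines=3, label="Flan T5-XXL"),
        gr.Textbox(lines=3, label="BART-Large-CNN"),
        gr.Textbox(lines=3, label="Pegasus")
    ],
    title=title,
    examples=examples
)
io.launch()
```

The two gr.Slider components map positionally onto the new min_length and max_length parameters of inference(), which is why the function signature and the inputs list were changed in the same commit; the commit also drops the old description and adds Pegasus as a third endpoint and output box.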