Commit 795f139 (parent: 143f8e6): test3

app.py CHANGED
@@ -94,7 +94,7 @@ with open('demonstration_3_42_parse.txt', 'r') as f:
 theme = gr.themes.Soft()
 
 
-gpt_pipeline = pipeline(
+gpt_pipeline = pipeline("fill-mask", model="gpt2")
 #vicuna7b_pipeline = pipeline(task="text2text-generation", model="lmsys/vicuna-7b-v1.3")
 #vicuna13b_pipeline = pipeline(task="text2text-generation", model="lmsys/vicuna-13b-v1.3")
 #vicuna33b_pipeline = pipeline(task="text2text-generation", model="lmsys/vicuna-33b-v1.3")
@@ -131,24 +131,24 @@ def process_text(model_name, task, text):
         generated_text3 = result3[0]['sequence']
 
         return (generated_text1, generated_text2, generated_text3)
-    elif task == 'Chunking':
-
-
-
-
-
-
-
-
-    elif task == 'Parsing':
-
-
-
+    # elif task == 'Chunking':
+    #     strategy1_format = template_all.format(text)
+    #     strategy2_format = prompt2_chunk.format(text)
+    #     strategy3_format = demon_chunk
+
+    #     result1 = gpt_pipeline(strategy1_format)[0]['generated_text']
+    #     result2 = gpt_pipeline(strategy2_format)[0]['generated_text']
+    #     result3 = gpt_pipeline(strategy3_format)[0]['generated_text']
+    #     return (result1, result2, result3)
+    # elif task == 'Parsing':
+    #     strategy1_format = template_all.format(text)
+    #     strategy2_format = prompt2_parse.format(text)
+    #     strategy3_format = demon_parse
 
-
-
-
-
+    #     result1 = gpt_pipeline(strategy1_format)[0]['generated_text']
+    #     result2 = gpt_pipeline(strategy2_format)[0]['generated_text']
+    #     result3 = gpt_pipeline(strategy3_format)[0]['generated_text']
+    #     return (result1, result2, result3)
 
 # Gradio interface
 iface = gr.Interface(
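For reference, the two dictionary keys this file reads from belong to different transformers pipelines: fill-mask results expose the completed sentence under 'sequence' (as in result3[0]['sequence'] above), while text-generation results expose their output under 'generated_text' (the key used by the commented-out chunking and parsing branches). The sketch below is not part of this commit and uses illustrative checkpoints; GPT-2 is a causal language model, so it is normally served through the text-generation task rather than fill-mask.

from transformers import pipeline

# Hypothetical sketch, not taken from app.py: contrast the output keys of the two tasks.

# fill-mask needs a masked-LM checkpoint with a mask token (distilroberta-base is
# only an example); each result dict carries the filled-in text under 'sequence'.
mask_pipeline = pipeline("fill-mask", model="distilroberta-base")
print(mask_pipeline("The capital of France is <mask>.")[0]["sequence"])

# GPT-2 is typically served through text-generation; each result dict carries the
# output under 'generated_text', the key the commented-out branches expect.
gen_pipeline = pipeline("text-generation", model="gpt2")
print(gen_pipeline("Chunk the following sentence into phrases:", max_new_tokens=30)[0]["generated_text"])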