Update app.py
app.py CHANGED
@@ -12,21 +12,22 @@ tokenizer = BertTokenizer.from_pretrained(".")
 model = TFGPT2LMHeadModel.from_pretrained(".")
 text_generator = TextGenerationPipeline(model, tokenizer)
 
+title = "写一首诗"
+article = "<div style='text-align: center;'>使用的模型:<a href='https://huggingface.co/uer/gpt2-chinese-poem' target='_blank'>https://huggingface.co/uer/gpt2-chinese-poem</a></div>"
+
+
 def poem(cls, max_length):
     txt = text_generator('[CLS]'+cls, max_length=int(max_length), do_sample=True)
     return txt[0]['generated_text'].replace('[CLS]','').replace('[SEP]','').replace(' ','').replace('。','。\n')
 
 iface = gr.Interface(
-    gr.Markdown(
-    """
-    # 写一首诗
-    模型:https://huggingface.co/uer/gpt2-chinese-poem
-    """),
     fn=poem,
     inputs=[
-        gr.Textbox("今晚提测又加班,BUG一堆改不完。", lines=1, label="
+        gr.Textbox("今晚提测又加班,BUG一堆改不完。", lines=1, label="请输入第一句:"),
         gr.Number(66, label='字数:'),
     ],
-    outputs=gr.Textbox(lines=6)
+    outputs=gr.Textbox(lines=6),
+    title=title,
+    article=article)
 
 iface.launch()
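
Taken together, the change drops the gr.Markdown(...) block that was being passed positionally into gr.Interface, replaces it with the Interface's own title= and article= parameters, gives the input Textbox an explicit label, and closes the gr.Interface(...) call after article=article. For reference, below is a sketch of how the touched part of app.py reads after this commit, assembled from the new side of the hunk. The import lines are not part of the hunk, so `import gradio as gr` and the transformers imports are assumptions inferred from the identifiers used; comments translating the Chinese UI strings are added for orientation.

# Assumed imports -- not shown in the hunk; inferred from the identifiers used.
import gradio as gr
from transformers import BertTokenizer, TFGPT2LMHeadModel, TextGenerationPipeline

tokenizer = BertTokenizer.from_pretrained(".")   # context line from the hunk header
model = TFGPT2LMHeadModel.from_pretrained(".")
text_generator = TextGenerationPipeline(model, tokenizer)

title = "写一首诗"  # "Write a poem"
article = "<div style='text-align: center;'>使用的模型:<a href='https://huggingface.co/uer/gpt2-chinese-poem' target='_blank'>https://huggingface.co/uer/gpt2-chinese-poem</a></div>"  # "Model used: ..."


def poem(cls, max_length):
    # Prepend the [CLS] token the poem model expects, then sample a continuation.
    txt = text_generator('[CLS]' + cls, max_length=int(max_length), do_sample=True)
    # Strip special tokens and spaces; start a new line after each full stop (。).
    return txt[0]['generated_text'].replace('[CLS]', '').replace('[SEP]', '').replace(' ', '').replace('。', '。\n')


iface = gr.Interface(
    fn=poem,
    inputs=[
        gr.Textbox("今晚提测又加班,BUG一堆改不完。", lines=1, label="请输入第一句:"),  # label: "Enter the first line:"
        gr.Number(66, label='字数:'),  # label: "Character count:"
    ],
    outputs=gr.Textbox(lines=6),
    title=title,
    article=article)

iface.launch()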