Update app.py
app.py (CHANGED)
@@ -2,7 +2,7 @@ import torch
 from transformers import AutoModelForCausalLM, AutoTokenizer
 import gradio as gr
 
-model_id = "
+model_id = "cody82/unitrip"
 
 tokenizer = AutoTokenizer.from_pretrained(model_id)
 model = AutoModelForCausalLM.from_pretrained(model_id)
@@ -35,12 +35,17 @@ def respond(user_message, history):
     history.append((user_message, generated_text))
     return history, history
 
+def clear_textbox():
+    return ""
+
 with gr.Blocks() as demo:
     chatbot = gr.Chatbot()
     message = gr.Textbox(placeholder="Введите вопрос...")
     state = gr.State([])
 
+    # When a message is submitted, call respond and update the chat and the state
     message.submit(respond, inputs=[message, state], outputs=[chatbot, state])
-
+    # Clear the input field after sending
+    message.submit(clear_textbox, inputs=[], outputs=[message])
 
 demo.launch(share=True)
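
For context, here is a minimal self-contained sketch of the app this commit produces. The model loading and the Blocks wiring are taken from the two hunks above; the body of respond (the lines between the hunks are not shown in this diff) is replaced with a generic tokenizer/model.generate call, so that part is an assumption rather than the Space's actual generation code.

import torch
from transformers import AutoModelForCausalLM, AutoTokenizer
import gradio as gr

model_id = "cody82/unitrip"

tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForCausalLM.from_pretrained(model_id)

def respond(user_message, history):
    # Assumed generation step: the real respond() in app.py is not part of this
    # diff; a plain causal-LM generate call stands in for it here.
    inputs = tokenizer(user_message, return_tensors="pt")
    with torch.no_grad():
        output_ids = model.generate(**inputs, max_new_tokens=128)
    generated_text = tokenizer.decode(output_ids[0], skip_special_tokens=True)

    history.append((user_message, generated_text))
    return history, history

def clear_textbox():
    # Returning an empty string empties the Textbox bound as the output below.
    return ""

with gr.Blocks() as demo:
    chatbot = gr.Chatbot()
    message = gr.Textbox(placeholder="Введите вопрос...")
    state = gr.State([])

    # Two listeners on the same submit event: the first updates the chat and
    # the state, the second clears the input box.
    message.submit(respond, inputs=[message, state], outputs=[chatbot, state])
    message.submit(clear_textbox, inputs=[], outputs=[message])

demo.launch(share=True)

An alternative to registering two separate listeners is to chain the clearing step onto the first one with .then(), which Gradio event listeners support; the commit's approach of two .submit() calls achieves the same user-visible effect.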