Update app.py
app.py CHANGED
@@ -3,22 +3,22 @@ from transformers import pipeline
 import torch
 import logging

-# Настройка логирования
+# Настройка логирования
 logging.basicConfig(level=logging.INFO)
 logger = logging.getLogger(__name__)

-# Загружаем модель
+# Загружаем модель
 model_name = "distilgpt2"
 try:
     logger.info(f"Попытка загрузки модели {model_name}...")
     generator = pipeline(
         "text-generation",
         model=model_name,
-        device=-1,
+        device=-1,
         framework="pt",
         max_length=512,
         truncation=True,
-        model_kwargs={"torch_dtype": torch.float32}
+        model_kwargs={"torch_dtype": torch.float32}
     )
     logger.info("Модель успешно загружена.")
 except Exception as e:
@@ -27,13 +27,11 @@ except Exception as e:

 def respond(message, history, max_tokens=256, temperature=0.7, top_p=0.9):
     history = history or []
-    # Формируем входной текст
     input_text = ""
     for user_msg, bot_msg in history:
         input_text += f"User: {user_msg}\nAssistant: {bot_msg}\n"
     input_text += f"User: {message}"

-    # Генерация ответа
     try:
         logger.info(f"Генерация ответа для: {message}")
         outputs = generator(
@@ -51,7 +49,6 @@ def respond(message, history, max_tokens=256, temperature=0.7, top_p=0.9):
         logger.error(f"Ошибка генерации ответа: {e}")
         return f"Ошибка генерации: {e}", history

-    # Форматируем ответ
     formatted_response = format_response(response)
     history.append((message, formatted_response))
     return formatted_response, history
@@ -73,10 +70,9 @@ def extract_treatment(response):
     sentences = response.split(".")
     return sentences[-1].strip() if len(sentences) > 1 else "Не указано"

-# Gradio интерфейс
 with gr.Blocks(theme=gr.themes.Soft()) as demo:
     gr.Markdown("## Медицинский чат-бот на базе DistilGPT-2")
-    chatbot = gr.Chatbot(label="Чат", height=400)
+    chatbot = gr.Chatbot(label="Чат", height=400, type='messages')
     with gr.Row():
         msg = gr.Textbox(
             label="Ваше сообщение",
@@ -101,21 +97,18 @@ with gr.Blocks(theme=gr.themes.Soft()) as demo:
     def clear_chat():
         return [], [], ""

-    # Кнопка "Отправить"
     submit_btn.click(
         fn=submit_message,
         inputs=[msg, state, max_tokens, temperature, top_p],
         outputs=[chatbot, state, msg],
         queue=True
     )
-    # Поддержка Enter
     msg.submit(
         fn=submit_message,
         inputs=[msg, state, max_tokens, temperature, top_p],
         outputs=[chatbot, state, msg],
         queue=True
     )
-    # Кнопка "Очистить"
     clear_btn.click(
         fn=clear_chat,
         outputs=[chatbot, state, msg]
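Note on the visible change: with type='messages', a Gradio Chatbot expects the value sent to the chatbot output to be a list of {"role": ..., "content": ...} dicts rather than the (user, bot) tuples that respond() builds and appends to history. The submit_message function wired into the click/submit handlers is not shown in this diff, so the sketch below is only an illustration of one way to bridge the two formats; the tuples_to_messages helper is hypothetical and not part of app.py.

# Hypothetical helper (not in app.py): convert the (user, bot) tuple history
# that respond() maintains into the role/content dicts that a
# gr.Chatbot(type='messages') component expects to display.
def tuples_to_messages(history):
    messages = []
    for user_msg, bot_msg in history:
        messages.append({"role": "user", "content": user_msg})
        messages.append({"role": "assistant", "content": bot_msg})
    return messages

# Sketch of a submit_message under that assumption: keep the tuple-based
# history in gr.State, hand the Chatbot the messages-format view of it, and
# clear the textbox (matching outputs=[chatbot, state, msg] above).
def submit_message(message, history, max_tokens, temperature, top_p):
    _, history = respond(message, history, max_tokens, temperature, top_p)
    return tuples_to_messages(history), history, ""

clear_chat's return of [], [], "" remains valid either way, since an empty list is an acceptable starting value for both history formats.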