Update app.py
app.py
CHANGED
@@ -8,15 +8,15 @@ logging.basicConfig(level=logging.INFO)
 logger = logging.getLogger(__name__)
 
 # Load the model
-model_name = "
+model_name = "ai-forever/rugpt-3.5B-103billion-tokens"
 try:
     logger.info(f"Попытка загрузки модели {model_name}...")
     generator = pipeline(
         "text-generation",
         model=model_name,
-        device=-1,
+        device=-1,  # CPU
         framework="pt",
-        max_length=
+        max_length=150,  # reduced for optimization
         truncation=True,
         model_kwargs={"torch_dtype": torch.float32}
     )
@@ -25,7 +25,7 @@ except Exception as e:
     logger.error(f"Ошибка загрузки модели: {e}")
     exit(1)
 
-def respond(message, max_tokens=256, temperature=0.7, top_p=0.9):
+def respond(message, max_tokens=150, temperature=0.7, top_p=0.9):
     try:
         logger.info(f"Генерация ответа для: {message}")
         outputs = generator(
@@ -34,7 +34,6 @@ def respond(message, max_tokens=256, temperature=0.7, top_p=0.9):
             temperature=temperature,
             top_p=top_p,
             do_sample=True,
-            no_repeat_ngram_size=2,
             num_return_sequences=1
         )
         response = outputs[0]["generated_text"].strip()
@@ -48,14 +47,14 @@ demo = gr.Interface(
     fn=respond,
     inputs=[
         gr.Textbox(label="Ваше сообщение", placeholder="Опишите симптомы (например, 'Болит горло')..."),
-        gr.Slider(minimum=50, maximum=
-        gr.Slider(minimum=0.1, maximum=1.
+        gr.Slider(minimum=50, maximum=300, value=150, step=10, label="Макс. токенов"),
+        gr.Slider(minimum=0.1, maximum=1.0, value=0.7, label="Температура"),
         gr.Slider(minimum=0.1, maximum=1.0, value=0.9, label="Top-p")
     ],
     outputs="text",
-    title="Медицинский чат-бот на базе
+    title="Медицинский чат-бот на базе RuGPT-3.5B",
     theme=gr.themes.Soft()
 )
 
 if __name__ == "__main__":
-    demo.launch(
+    demo.launch()
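For a quick local check of the generation settings introduced in this commit, a minimal sketch along these lines reproduces the generation path outside Gradio. The model id and sampling parameters are taken from the diff; the hunks do not show how respond() forwards max_tokens into the generator call, so using max_new_tokens at call time (instead of max_length on the pipeline) is an assumption, not the app's actual wiring.

# Standalone smoke test for the settings used in app.py (sketch, not the app itself).
# Assumption: max_tokens is forwarded as max_new_tokens; that line is not visible in the diff.
import torch
from transformers import pipeline

model_name = "ai-forever/rugpt-3.5B-103billion-tokens"  # model id from the diff

generator = pipeline(
    "text-generation",
    model=model_name,
    device=-1,            # CPU, as in the commit
    framework="pt",
    truncation=True,
    model_kwargs={"torch_dtype": torch.float32},
)

outputs = generator(
    "Болит горло",        # example prompt taken from the UI placeholder text
    max_new_tokens=150,   # matches the new slider default
    temperature=0.7,
    top_p=0.9,
    do_sample=True,
    num_return_sequences=1,
)
print(outputs[0]["generated_text"].strip())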