import gradio as gr
from transformers import pipeline
# Load the model and tokenizer via the transformers text-generation pipeline.
# Note: Mixtral-8x7B is a very large model and needs substantial GPU memory to run.
model_name = "mistralai/Mixtral-8x7B-Instruct-v0.1"
generator = pipeline("text-generation", model=model_name, tokenizer=model_name)
def generate(message, chat_history, model, system_prompt):
    """Generates a response using the model."""
    # Combine the system prompt and the user message into a single prompt for context.
    prompt = system_prompt + "\n" + message
    # Generate a response; max_new_tokens bounds only the newly generated tokens.
    responses = generator(prompt, max_new_tokens=150, num_return_sequences=1)
    # Keep only the generated continuation, dropping the echoed prompt.
    response = responses[0]["generated_text"][len(prompt):].strip()
    # Append the exchange to the chat history and clear the input box.
    chat_history.append((message, response))
    return chat_history, ""
DEFAULT_SYSTEM_PROMPT = """
You are a helpful assistant in normal conversation.
When given a problem to solve, you are an expert problem-solving assistant.
Your task is to provide a detailed, step-by-step solution to a given question.
"""
def clear_chat():
    return [], ""
with gr.Blocks() as demo:
    gr.Markdown("# Custom Chat Interface")
    with gr.Row():
        model_dropdown = gr.Dropdown(choices=[model_name], label="Select Model", value=model_name)
        system_prompt = gr.Textbox(value=DEFAULT_SYSTEM_PROMPT, lines=5, label="System Prompt")
    chatbot = gr.Chatbot(label="Chat")
    msg = gr.Textbox(label="Type your message here...", placeholder="Enter your message...")
    msg.submit(generate, inputs=[msg, chatbot, model_dropdown, system_prompt], outputs=[chatbot, msg])
    gr.Button("Clear Chat").click(clear_chat, inputs=None, outputs=[chatbot, msg])
demo.launch()