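"""Minimal Gradio chat demo: the UI exposes a model choice, thinking budget, and
system prompt, but responses are simulated rather than produced by a real model."""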
import gradio as gr
MODELS = ["Mixtral-8x7B-Instruct-v0.1"]
def chat_with_ai(message, chat_history, system_prompt):
    """Builds an OpenAI-style message list from the system prompt, prior turns, and the new user message.

    Note: this helper is not currently wired into the demo's submit handler below.
    """
    history = [{"role": "system", "content": system_prompt}]
    history.extend(chat_history)
    history.append({"role": "user", "content": message})
    return history
def generate(message, chat_history, model, system_prompt, thinking_budget):
    """Simulates response generation; model, system_prompt, and thinking_budget are accepted but unused placeholders."""
    # Dummy logic for simulating a response
    response = f"Simulated response for: {message}"
    # Append the user turn and the simulated assistant turn in messages format
    chat_history.append({"role": "user", "content": message})
    chat_history.append({"role": "assistant", "content": response})
    # Return the updated history and clear the input textbox
    return chat_history, ""
DEFAULT_SYSTEM_PROMPT = """
You are a helpful assistant in normal conversation.
When given a problem to solve, you are an expert problem-solving assistant.
Your task is to provide a detailed, step-by-step solution to a given question.
...
"""
with gr.Blocks() as demo:
gr.Markdown("# Custom Chat Interface")
with gr.Row():
model = gr.Dropdown(choices=MODELS, label="Select Model", value=MODELS[0])
thinking_budget = gr.Slider(minimum=1, maximum=100, value=10, step=1, label="Thinking Budget")
system_prompt = gr.Textbox(value=DEFAULT_SYSTEM_PROMPT, lines=15, label="System Prompt")
chatbot = gr.Chatbot(label="Chat")
msg = gr.Textbox(label="Type your message here...", placeholder="Enter your message...")
    def clear_chat():
        return [], ""

    gr.Button("Clear Chat").click(clear_chat, inputs=None, outputs=[chatbot, msg])
    msg.submit(generate, inputs=[msg, chatbot, model, system_prompt, thinking_budget], outputs=[chatbot, msg])
demo.launch()