import gradio as gr
import time

MODELS = ["Mixtral-8x7B-Instruct-v0.1"]

def chat_with_ai(message, chat_history, system_prompt):
    """Build the OpenAI-style message list for the API call."""
    messages = [{"role": "system", "content": system_prompt}]
    # With a `type="messages"` Chatbot, the history is already a list of
    # {"role": ..., "content": ...} dicts, so it can be passed through as-is.
    messages.extend(chat_history)
    messages.append({"role": "user", "content": message})
    return messages
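
# For example, with one prior exchange in the history, chat_with_ai returns:
# [{"role": "system", "content": system_prompt},
#  {"role": "user", "content": "Hi"},
#  {"role": "assistant", "content": "Hello!"},
#  {"role": "user", "content": message}]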

def respond(message, chat_history, model, system_prompt, thinking_budget):
    """Simulate API call and get the response. Replace with actual API call."""
    # Simulate a delay
    time.sleep(1)
    # Dummy response
    response = f"Simulated response for: {message}"
    return response, 1.0
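
# A minimal sketch of how `respond` could call a real endpoint instead of the
# simulation above, assuming huggingface_hub's InferenceClient and an access
# token in a hypothetical HF_TOKEN environment variable; the mapping from
# thinking_budget to max_tokens is likewise only illustrative.
#
# import os
# from huggingface_hub import InferenceClient
#
# def respond_via_api(message, chat_history, model, system_prompt, thinking_budget):
#     client = InferenceClient(token=os.environ.get("HF_TOKEN"))
#     messages = chat_with_ai(message, chat_history, system_prompt)
#     start = time.time()
#     result = client.chat_completion(
#         messages=messages,
#         model=f"mistralai/{model}",
#         max_tokens=int(thinking_budget) * 100,  # rough cap derived from the slider
#     )
#     return result.choices[0].message.content, time.time() - start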

def generate(message, history, model, system_prompt, thinking_budget):
    """Generate the chatbot response and append it to the history."""
    response, _thinking_time = respond(message, history, model, system_prompt, thinking_budget)
    # Append the exchange in the role/content format that `type="messages"` expects.
    history.append({"role": "user", "content": message})
    history.append({"role": "assistant", "content": response})
    return history, ""

# Define the default system prompt
DEFAULT_SYSTEM_PROMPT = """
You are a helpful assistant in normal conversation.
When given a problem to solve, you are an expert problem-solving assistant.
Your task is to provide a detailed, step-by-step solution to a given question.
...
"""

with gr.Blocks() as demo:
    gr.Markdown("# Custom Chat Interface")
    
    with gr.Row():
        model = gr.Dropdown(choices=MODELS, label="Select Model", value=MODELS[0])
        thinking_budget = gr.Slider(minimum=1, maximum=100, value=10, step=1, label="Thinking Budget")
    
    system_prompt = gr.Textbox(value=DEFAULT_SYSTEM_PROMPT, lines=15, label="System Prompt")
    chatbot = gr.Chatbot(label="Chat", type="messages")
    msg = gr.Textbox(label="Message", placeholder="Type your message here...")
    
    # Clear chat
    def clear_chat():
        return [], ""
    
    gr.Button("Clear Chat").click(clear_chat, inputs=None, outputs=[chatbot, msg])

    # Generate response on message submission
    msg.submit(generate, inputs=[msg, chatbot, model, system_prompt, thinking_budget], outputs=[chatbot, msg])

demo.launch()