import gradio as gr


def load_model(model_name):
    """Return a placeholder callable that simulates a response from the chosen model."""
    print(f"Loading {model_name}...")

    if model_name == "DeepSeek-R1-Distill-Qwen-32B":
        return lambda input_text, history: f"Distilled Model Response to: {input_text}"
    elif model_name == "DeepSeek-R1":
        return lambda input_text, history: f"Base Model Response to: {input_text}"
    elif model_name == "DeepSeek-R1-Zero":
        return lambda input_text, history: f"Zero Model Response to: {input_text}"
    else:
        return lambda input_text, history: f"Default Response to: {input_text}"


# Instantiate a placeholder responder for each supported model.
deepseek_r1_distill = load_model("DeepSeek-R1-Distill-Qwen-32B")
deepseek_r1 = load_model("DeepSeek-R1")
deepseek_r1_zero = load_model("DeepSeek-R1-Zero")

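# The lambdas above only echo the input text. As a rough sketch of what real
# inference could look like, load_model might be swapped for something like the
# function below. This is an untested illustration, not part of the original demo:
# the Hugging Face repo id, dtype/device settings, and prompt handling are assumptions,
# and `load_hf_model` is a hypothetical name.
def load_hf_model(repo_id):
    """Return a callable backed by a Hugging Face causal LM (sketch only)."""
    # Requires `pip install transformers accelerate` and enough GPU memory for the model.
    from transformers import AutoModelForCausalLM, AutoTokenizer

    tokenizer = AutoTokenizer.from_pretrained(repo_id)
    model = AutoModelForCausalLM.from_pretrained(repo_id, torch_dtype="auto", device_map="auto")

    def generate(input_text, history, max_new_tokens=200, temperature=0.7, top_p=0.9):
        # Ignores history for simplicity; a real app would build a chat prompt from it.
        inputs = tokenizer(input_text, return_tensors="pt").to(model.device)
        output_ids = model.generate(
            **inputs,
            max_new_tokens=max_new_tokens,
            do_sample=True,
            temperature=temperature,
            top_p=top_p,
        )
        # Decode only the newly generated tokens, skipping the prompt.
        return tokenizer.decode(output_ids[0][inputs["input_ids"].shape[-1]:], skip_special_tokens=True)

    return generate

# Example (assumed repo id): deepseek_r1_distill = load_hf_model("deepseek-ai/DeepSeek-R1-Distill-Qwen-32B")
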
def chatbot(input_text, history, model_choice, system_message, max_new_tokens, temperature, top_p):
    """Route the user message to the selected model and append the exchange to the chat history."""
    history = history or []
    print(f"Input: {input_text}, History: {history}, Model: {model_choice}")

    # Pick the responder that matches the radio-button selection.
    if model_choice == "DeepSeek-R1-Distill-Qwen-32B":
        model_function = deepseek_r1_distill
    elif model_choice == "DeepSeek-R1":
        model_function = deepseek_r1
    elif model_choice == "DeepSeek-R1-Zero":
        model_function = deepseek_r1_zero
    else:
        model_function = lambda x, h: "Please select a model."

    response = model_function(input_text, history)
    response = (
        f"**System Message:** {system_message}\n\n**Model Response:** {response}\n\n"
        f"**Parameters Used:**\n- Max New Tokens: {max_new_tokens}\n- Temperature: {temperature}\n- Top-p: {top_p}"
    )

    history.append((input_text, response))
    # Return the updated history twice (for the Chatbot display and the State) and clear the textbox.
    return history, history, ""

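# Quick sanity check outside the UI (hypothetical values, safe to delete):
# updated_history, state, cleared = chatbot(
#     "Hello", [], "DeepSeek-R1", "You are a friendly Chatbot", 200, 0.7, 0.9
# )
# `updated_history` and `state` hold the same list of (message, response) tuples,
# and `cleared` is the empty string used to reset the textbox.
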
with gr.Blocks(theme=gr.themes.Soft()) as demo:
    gr.Markdown(
        """
        # DeepSeek Chatbot
        Created by [ruslanmv.com](https://ruslanmv.com/)

        This is a demo of different DeepSeek models. Select a model, type your message, and click "Submit".
        You can also adjust optional parameters like system message, max new tokens, temperature, and top-p.
        """
    )

    with gr.Row():
        with gr.Column(scale=4):
            chatbot_output = gr.Chatbot(label="DeepSeek Chatbot", height=500)
            msg = gr.Textbox(label="Your Message", placeholder="Type your message here...")

            with gr.Row():
                submit_btn = gr.Button("Submit", variant="primary")
                clear_btn = gr.ClearButton([msg, chatbot_output])

        with gr.Column(scale=1):
            model_choice = gr.Radio(
                choices=["DeepSeek-R1-Distill-Qwen-32B", "DeepSeek-R1", "DeepSeek-R1-Zero"],
                label="Choose a Model",
                value="DeepSeek-R1"
            )

            with gr.Accordion("Optional Parameters", open=False):
                system_message = gr.Textbox(
                    label="System Message",
                    value="You are a friendly Chatbot created by ruslanmv.com",
                    lines=2,
                )
                max_new_tokens = gr.Slider(
                    minimum=1, maximum=4000, value=200, label="Max New Tokens"
                )
                temperature = gr.Slider(
                    minimum=0.10, maximum=4.00, value=0.70, label="Temperature"
                )
                top_p = gr.Slider(
                    minimum=0.10, maximum=1.00, value=0.90, label="Top-p (nucleus sampling)"
                )

    # Holds the running conversation so it persists across submissions.
    chat_history = gr.State([])

    # Both the Submit button and pressing Enter in the textbox call the same handler.
    submit_btn.click(
        chatbot,
        [msg, chat_history, model_choice, system_message, max_new_tokens, temperature, top_p],
        [chatbot_output, chat_history, msg],
    )
    msg.submit(
        chatbot,
        [msg, chat_history, model_choice, system_message, max_new_tokens, temperature, top_p],
        [chatbot_output, chat_history, msg],
    )

if __name__ == "__main__":
    demo.launch()
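# Optional: demo.launch() accepts extra arguments if you need them, e.g.
# demo.launch(share=True) for a temporary public link, or
# demo.launch(server_name="0.0.0.0", server_port=7860) to bind a specific host and port.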