import gradio as gr

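# Placeholder model loader: each "model" is a stub callable that simply echoes its
# input. Replace this with real weight loading or an API call to add actual inference.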
def load_model(model_name):
    return lambda input_text: f"Response from {model_name}: {input_text}"

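# Instantiate the stub models. They are not yet wired into the UI below and serve
# as placeholders for a later integration step.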
deepseek_r1_distill = load_model("DeepSeek-R1-Distill-Qwen-32B")
deepseek_r1 = load_model("DeepSeek-R1")
deepseek_r1_zero = load_model("DeepSeek-R1-Zero")

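# Build the optional generation controls inside a collapsed accordion and return
# the components so they can be passed to the submit handler.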
def create_optional_parameters():
    with gr.Accordion("Optional Parameters", open=False):
        system_message = gr.Textbox(
            label="System Message",
            value="You are a friendly Chatbot created by ruslanmv.com",
            lines=2
        )
        max_new_tokens = gr.Slider(minimum=1, maximum=4000, value=200, label="Max New Tokens")
        temperature = gr.Slider(minimum=0.1, maximum=4.0, value=0.7, label="Temperature")
        top_p = gr.Slider(minimum=0.1, maximum=1.0, value=0.9, label="Top-p (nucleus sampling)")
    return system_message, max_new_tokens, temperature, top_p

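# Echo the submitted message and parameters back as Markdown. A real model call
# (e.g. to one of the DeepSeek stubs above) would replace the formatted string here.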
def chat_interface(user_input, system_message, max_new_tokens, temperature, top_p):
    response = f"""**System Message**: {system_message}

**Your Input**: {user_input}

**Parameters Used**:
- Max New Tokens: {max_new_tokens}
- Temperature: {temperature}
- Top-p: {top_p}

*Note: Actual model integration required for real responses*"""
    return response

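# Lay out the UI with Blocks: a header row, an input column (message box, submit
# button, optional parameters) and an output column, styled with some custom CSS.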
with gr.Blocks(css="""
    .chat-container { max-width: 700px; margin: auto; }
    .chat-input { margin-top: 20px; }
    .chat-output { margin-top: 10px; padding: 10px; border: 1px solid #ccc; border-radius: 10px; background-color: #f9f9f9; }
""") as demo:
    with gr.Row(variant="panel"):
        gr.Markdown(
            """# DeepSeek Chatbot
            Created by [ruslanmv.com](https://ruslanmv.com/)
            A friendly chatbot interface. Start a conversation below!
            """,
            elem_id="header"
        )

    with gr.Row(elem_classes="chat-container"):
        with gr.Column():
            user_input = gr.Textbox(
                label="Your Message",
                placeholder="Type your message here...",
                lines=3,
                elem_classes="chat-input"
            )
            submit_button = gr.Button("Submit", variant="primary")
            system_message, max_new_tokens, temperature, top_p = create_optional_parameters()
        with gr.Column():
            output = gr.Markdown(
                label="Chatbot Response",
                elem_classes="chat-output"
            )

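    # Wire the submit button to the handler: the textbox and the optional parameter
    # components are the inputs, and the Markdown component receives the response.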
    submit_button.click(
        chat_interface,
        inputs=[user_input, system_message, max_new_tokens, temperature, top_p],
        outputs=output
    )


if __name__ == "__main__":
    demo.launch()