import gradio as gr
# Placeholder for model loading (adjust as needed for your specific models)
def load_model(model_name):
    # Replace this function with actual model loading code if needed
    return lambda input_text: f"Response from {model_name}: {input_text}"
# Load the models (placeholder functions here)
deepseek_r1_distill = load_model("DeepSeek-R1-Distill-Qwen-32B")
deepseek_r1 = load_model("DeepSeek-R1")
deepseek_r1_zero = load_model("DeepSeek-R1-Zero")
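
# The loaders above return stubs. Below is a minimal sketch of real loading through the
# Hugging Face Inference API (an assumption, not part of this app): load_remote_model and
# its use of huggingface_hub.InferenceClient against the deepseek-ai repos are illustrative.
# Swap it in for load_model above if you have API access to these models.
def load_remote_model(model_name):
    from huggingface_hub import InferenceClient  # deferred import; only needed for live calls

    client = InferenceClient(model=f"deepseek-ai/{model_name}")

    def generate(input_text, max_new_tokens=200, temperature=0.7, top_p=0.9):
        # Returns the generated continuation for the given prompt
        return client.text_generation(
            input_text,
            max_new_tokens=max_new_tokens,
            temperature=temperature,
            top_p=top_p,
        )

    return generate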
# Define the optional parameters section
def create_optional_parameters():
    with gr.Accordion("Optional Parameters (Click to Expand)", open=False):
        system_message = gr.Textbox(
            label="System Message",
            value="You are a friendly Chatbot created by ruslanmv.com",
            lines=2,
            interactive=True
        )
        max_new_tokens = gr.Slider(minimum=1, maximum=4000, value=200, label="Max New Tokens", interactive=True)
        temperature = gr.Slider(minimum=0.10, maximum=4.00, value=0.70, label="Temperature", interactive=True)
        top_p = gr.Slider(minimum=0.10, maximum=1.00, value=0.90, label="Top-p (nucleus sampling)", interactive=True)
    return system_message, max_new_tokens, temperature, top_p
# Define the main interface
def chat_interface(user_input, system_message, max_new_tokens, temperature, top_p):
    # Placeholder response - integrate with actual model here
    response = f"""**System Message**: {system_message}

**Your Input**: {user_input}

**Parameters Used**:
- Max New Tokens: {max_new_tokens}
- Temperature: {temperature}
- Top-p: {top_p}

*Note: Actual model integration required for real responses*"""
    return response
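
# If the remote loader sketched above is wired in, the placeholder body of chat_interface
# could be replaced by something like the following (hypothetical prompt format; the
# original app only returns the placeholder Markdown string above):
def chat_interface_live(user_input, system_message, max_new_tokens, temperature, top_p):
    model = load_remote_model("DeepSeek-R1-Distill-Qwen-32B")
    prompt = f"{system_message}\n\nUser: {user_input}\nAssistant:"
    return model(
        prompt,
        max_new_tokens=int(max_new_tokens),
        temperature=float(temperature),
        top_p=float(top_p),
    )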
# Create the Gradio interface
with gr.Blocks(css="""
    .gradio-container {
        font-family: Arial, sans-serif;
        background-color: #f9f9f9;
        color: #333;
        padding: 20px;
    }
    .gr-button.primary {
        background-color: #4caf50;
        color: white;
        border: none;
        padding: 10px 20px;
        font-size: 16px;
        border-radius: 5px;
        cursor: pointer;
    }
    .gr-button.primary:hover {
        background-color: #45a049;
    }
    .gr-textbox textarea {
        font-size: 16px;
    }
""") as demo:
gr.Markdown("""
# DeepSeek Chatbot
Welcome to the **DeepSeek Chatbot**! This AI-powered chatbot is designed to provide insightful responses.
Created by [ruslanmv.com](https://ruslanmv.com/).
""")
with gr.Row():
with gr.Column(scale=3):
user_input = gr.Textbox(
label="Your Message",
placeholder="Type your message here...",
lines=4,
interactive=True
)
submit_button = gr.Button("Send", variant="primary")
with gr.Column(scale=5):
output = gr.Markdown(label="Chatbot Response")
# Add the optional parameters section
system_message, max_new_tokens, temperature, top_p = create_optional_parameters()
# Link the submit button to the chat interface
submit_button.click(
chat_interface,
inputs=[user_input, system_message, max_new_tokens, temperature, top_p],
outputs=output
)
# Launch the demo
if __name__ == "__main__":
demo.launch() |
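
# To run locally (environment assumptions, not part of the original file):
#   pip install gradio                # plus huggingface_hub if using the remote sketches above
#   python app.py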