Update app.py
app.py
CHANGED
@@ -94,13 +94,14 @@ def respond(message, history: list[tuple[str, str]], model, system_message, max_
 
 demo = gr.ChatInterface(
     respond,
-
+    examples=["Explain quantum computing", "Explain forex trading"],
     additional_inputs=[
         gr.Dropdown(["DeepSeek-R1-Distill-Qwen-1.5B", "DeepSeek-R1-Distill-Qwen-32B", "Gemma-2-2b", "Gemma-7b", "Llama2-13b-chat", "Llama3-8b-Instruct", "Llama3.1-8b-Instruct", "Microsoft-phi-2", "Mixtral-8x7B-Instruct", "Zephr-7b-beta"], label="Select Model"),
         gr.Textbox(value="You are a friendly and helpful Chatbot, be concise and straight to the point, avoid excessive reasoning.", label="System message"),
         gr.Slider(minimum=1, maximum=2048, value=512, step=1, label="Max new tokens"),
         gr.Slider(minimum=0.1, maximum=4.0, value=0.7, step=0.1, label="Temperature"),
         gr.Slider(minimum=0.1, maximum=1.0, value=0.95, step=0.05, label="Top-p (nucleus sampling)")
+
     ]
 )
 
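The commit adds an examples argument to the gr.ChatInterface call; Gradio renders these as clickable starter prompts below the chat box. Below is a minimal, self-contained sketch of how the call fits together after this change. The body of respond is a placeholder, and the trailing parameters max_tokens, temperature, and top_p are assumptions inferred from the sliders, since the hunk header truncates the real signature at "max_".

```python
import gradio as gr

# Stub with the signature implied by the diff. The parameters after
# system_message (max_tokens, temperature, top_p) are assumed from the
# sliders; the real app calls the selected model instead of echoing.
def respond(message, history: list[tuple[str, str]], model, system_message,
            max_tokens, temperature, top_p):
    yield f"[{model}] (stub) {message}"

demo = gr.ChatInterface(
    respond,
    # New in this commit: clickable example prompts under the chat box.
    examples=["Explain quantum computing", "Explain forex trading"],
    additional_inputs=[
        gr.Dropdown(
            ["DeepSeek-R1-Distill-Qwen-1.5B", "DeepSeek-R1-Distill-Qwen-32B",
             "Gemma-2-2b", "Gemma-7b", "Llama2-13b-chat", "Llama3-8b-Instruct",
             "Llama3.1-8b-Instruct", "Microsoft-phi-2", "Mixtral-8x7B-Instruct",
             "Zephr-7b-beta"],
            label="Select Model",
        ),
        gr.Textbox(
            value="You are a friendly and helpful Chatbot, be concise and "
                  "straight to the point, avoid excessive reasoning.",
            label="System message",
        ),
        gr.Slider(minimum=1, maximum=2048, value=512, step=1, label="Max new tokens"),
        gr.Slider(minimum=0.1, maximum=4.0, value=0.7, step=0.1, label="Temperature"),
        gr.Slider(minimum=0.1, maximum=1.0, value=0.95, step=0.05, label="Top-p (nucleus sampling)"),
    ],
)

if __name__ == "__main__":
    demo.launch()
```

Per the Gradio docs, string examples only prefill the message; to also prefill the model dropdown or sliders, each example would need to be a list supplying values for the additional inputs as well.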