Spaces:
Sleeping
Sleeping
Update app.py
Browse files
app.py
CHANGED
@@ -27,6 +27,9 @@ def choose_model(model_name):
|
|
27 |
elif model_name == "Llama3.1-8b-Instruct":
|
28 |
model = "meta-llama/Llama-3.1-8B-Instruct"
|
29 |
|
|
|
|
|
|
|
30 |
client = InferenceClient(model, token=os.getenv('deepseekv2'))
|
31 |
return client
|
32 |
|
@@ -65,7 +68,7 @@ demo = gr.ChatInterface(
|
|
65 |
respond,
|
66 |
|
67 |
additional_inputs=[
|
68 |
-
gr.Dropdown(["Qwen1.5", "Llama3-8b-Instruct", "Llama3.1-8b-Instruct"], label="Select Model"),
|
69 |
gr.Textbox(value="You are a friendly and helpful Chatbot, be concise and straight to the point, avoid excessive reasoning.", label="System message"),
|
70 |
gr.Slider(minimum=1, maximum=2048, value=512, step=1, label="Max new tokens"),
|
71 |
gr.Slider(minimum=0.1, maximum=4.0, value=0.7, step=0.1, label="Temperature"),
|
|
|
27 |
elif model_name == "Llama3.1-8b-Instruct":
|
28 |
model = "meta-llama/Llama-3.1-8B-Instruct"
|
29 |
|
30 |
+
elif model_name == "Llama2-13b-chat":
|
31 |
+
model = "meta-llama/Llama-2-13b-chat-hf"
|
32 |
+
|
33 |
client = InferenceClient(model, token=os.getenv('deepseekv2'))
|
34 |
return client
|
35 |
|
|
|
68 |
respond,
|
69 |
|
70 |
additional_inputs=[
|
71 |
+
gr.Dropdown(["Qwen1.5", "Llama2-13b-chat", "Llama3-8b-Instruct", "Llama3.1-8b-Instruct"], label="Select Model"),
|
72 |
gr.Textbox(value="You are a friendly and helpful Chatbot, be concise and straight to the point, avoid excessive reasoning.", label="System message"),
|
73 |
gr.Slider(minimum=1, maximum=2048, value=512, step=1, label="Max new tokens"),
|
74 |
gr.Slider(minimum=0.1, maximum=4.0, value=0.7, step=0.1, label="Temperature"),
|