Update app.py
app.py CHANGED
@@ -40,6 +40,10 @@ def get_context_by_model(model_name):
     return model_context_limits.get(model_name, None)
 
 def get_messages_formatter_type(model_name):
+    if model_name is None:
+        logging.warning("Model name is None. Defaulting to CHATML formatter.")
+        return MessagesFormatterType.CHATML
+
     model_name = model_name.lower()
     if "mistral" in model_name:
         return MessagesFormatterType.MISTRAL
@@ -95,6 +99,10 @@ def respond(
     top_k,
     repeat_penalty,
 ):
+    if model is None:
+        logging.error("Model is None. Please select a valid model.")
+        return "Error: No model selected. Please choose a valid model."
+
     global llm
     global llm_model
     chat_template = get_messages_formatter_type(model)
@@ -193,7 +201,7 @@ demo = gr.ChatInterface(
         gr.Dropdown([
                 'Mistral-7B-Instruct-v0.3'
             ],
-            value="Mistral-7B-Instruct-v0.3",
+            value="Mistral-7B-Instruct-v0.3",  # This should match exactly
             label="Model"
         ),
         gr.Textbox(value=web_search_system_prompt, label="System message"),
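For reference, the new guard in get_messages_formatter_type can be exercised on its own. The snippet below is a minimal sketch, not the full app.py: the import of MessagesFormatterType from llama_cpp_agent and the final CHATML default for non-Mistral names are assumptions, since the rest of the function sits outside this diff.

import logging

from llama_cpp_agent import MessagesFormatterType  # assumed import path used by app.py

logging.basicConfig(level=logging.INFO)

def get_messages_formatter_type(model_name):
    # New guard: a missing model name now falls back to the CHATML formatter
    # instead of raising AttributeError on model_name.lower().
    if model_name is None:
        logging.warning("Model name is None. Defaulting to CHATML formatter.")
        return MessagesFormatterType.CHATML

    model_name = model_name.lower()
    if "mistral" in model_name:
        return MessagesFormatterType.MISTRAL
    return MessagesFormatterType.CHATML  # assumed fallback for branches not shown in the diff

print(get_messages_formatter_type(None))                        # CHATML, with a warning logged
print(get_messages_formatter_type("Mistral-7B-Instruct-v0.3"))  # MISTRAL

The guard added to respond() follows the same pattern: log the problem and bail out early with a readable message rather than letting the call fail further down once no model has been selected.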