Update app.py
app.py (CHANGED)
@@ -10,10 +10,10 @@ from transformers import AutoModelForCausalLM, AutoTokenizer, BitsAndBytesConfig
 # Update model configuration for Mistral-small-24B
 MODEL_ID = "baconnier/Napoleon_24B_V0.0"
 CHAT_TEMPLATE = "mistral"  # Mistral uses its own chat template
-MODEL_NAME = "
+MODEL_NAME = "NAPOLEON"
 CONTEXT_LENGTH = 2048  # Mistral supports longer context
 COLOR = "black"
-EMOJI = "
+EMOJI = "🦅"  # Mistral-themed emoji
 DESCRIPTION = f"This is {MODEL_NAME} model, a powerful 24B parameter language model from Mistral AI."

 css = """
@@ -128,7 +128,7 @@ torch_dtype=torch.bfloat16
 gr.ChatInterface(
     predict,
     css=css,
-    title=EMOJI + " " + MODEL_NAME,
+    title=EMOJI + " " + MODEL_NAME + " 🇫🇷",
     description=DESCRIPTION,
     examples=[
         ['Pourquoi les Français portent-ils toujours une baguette sous le bras ?'],
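
For context, the constants touched by this commit feed the model-loading code and the gr.ChatInterface call elsewhere in app.py. Below is a minimal sketch of how that wiring typically looks for a 24B checkpoint loaded in 4-bit with transformers and bitsandbytes; the Space's actual predict implementation and quantization settings are not shown in this diff, so every detail below (the 4-bit config, device_map, and the predict body) is an assumption for illustration only.

# Hypothetical sketch: how the changed constants are typically consumed in app.py.
import torch
import gradio as gr
from transformers import AutoModelForCausalLM, AutoTokenizer, BitsAndBytesConfig

MODEL_ID = "baconnier/Napoleon_24B_V0.0"
CONTEXT_LENGTH = 2048

# Assumed 4-bit quantization so the 24B checkpoint fits on a single GPU.
bnb_config = BitsAndBytesConfig(
    load_in_4bit=True,
    bnb_4bit_compute_dtype=torch.bfloat16,
)
tokenizer = AutoTokenizer.from_pretrained(MODEL_ID)
model = AutoModelForCausalLM.from_pretrained(
    MODEL_ID,
    quantization_config=bnb_config,
    device_map="auto",
    torch_dtype=torch.bfloat16,
)

def predict(message, history):
    # Gradio's ChatInterface passes history as a list of [user, assistant] pairs.
    messages = []
    for user_msg, assistant_msg in history:
        messages.append({"role": "user", "content": user_msg})
        messages.append({"role": "assistant", "content": assistant_msg})
    messages.append({"role": "user", "content": message})

    input_ids = tokenizer.apply_chat_template(
        messages, add_generation_prompt=True, return_tensors="pt"
    ).to(model.device)
    output_ids = model.generate(input_ids, max_new_tokens=CONTEXT_LENGTH)
    # Strip the prompt tokens and return only the newly generated reply.
    return tokenizer.decode(output_ids[0][input_ids.shape[-1]:], skip_special_tokens=True)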