Update app.py
app.py CHANGED
@@ -10,8 +10,8 @@ from transformers import AutoModelForCausalLM, AutoTokenizer, BitsAndBytesConfig
 # Update model configuration for Mistral-small-24B
 MODEL_ID = "baconnier/Napoleon_24B_V0.0"
 CHAT_TEMPLATE = "mistral" # Mistral uses its own chat template
-MODEL_NAME = "Napoleon
-CONTEXT_LENGTH =
+MODEL_NAME = "🦅 Napoleon 🦅"
+CONTEXT_LENGTH = 2048 # Mistral supports longer context
 COLOR = "black"
 EMOJI = "🇫🇷" # Mistral-themed emoji
 DESCRIPTION = f"This is {MODEL_NAME} model, a powerful 24B parameter language model from Mistral AI."
@@ -143,7 +143,8 @@ gr.ChatInterface(
     additional_inputs=[
         gr.Textbox(SYSTEM_MESSAGE, label="System prompt", visible=False), # Hidden system prompt
         gr.Slider(0, 1, 0.7, label="Temperature"), # Adjusted default for Mistral
-        gr.Slider(0,
+        gr.Slider(0, 2048, 1024, label="Max new tokens"), # Increased for longer context
+        #gr.Slider(0, 32768, 12000, label="Max new tokens"), # Increased for longer context
         gr.Slider(1, 100, 50, label="Top K sampling"),
         gr.Slider(0, 2, 1.1, label="Repetition penalty"),
         gr.Slider(0, 1, 0.95, label="Top P sampling"),
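
For context, the listing below is a minimal, hypothetical sketch of how the values touched by this commit typically fit together in a Gradio + transformers chat Space. Only MODEL_ID, MODEL_NAME, CONTEXT_LENGTH, DESCRIPTION, and the additional_inputs sliders come from the diff; the quantization settings, SYSTEM_MESSAGE placeholder, and the predict() helper are assumptions, since the rest of app.py is not shown here.

# Hedged sketch, NOT the Space's actual app.py. Values marked "from diff" are
# taken from this commit; everything else is an assumption for illustration.
import torch
import gradio as gr
from transformers import AutoModelForCausalLM, AutoTokenizer, BitsAndBytesConfig

MODEL_ID = "baconnier/Napoleon_24B_V0.0"   # from diff
MODEL_NAME = "🦅 Napoleon 🦅"              # from diff
CONTEXT_LENGTH = 2048                       # from diff
SYSTEM_MESSAGE = "You are Napoleon Bonaparte."  # placeholder, not from the diff
DESCRIPTION = f"This is {MODEL_NAME} model, a powerful 24B parameter language model from Mistral AI."

# 4-bit quantization (assumed) keeps a 24B-parameter model inside a single-GPU Space.
quant_config = BitsAndBytesConfig(load_in_4bit=True, bnb_4bit_compute_dtype=torch.bfloat16)
tokenizer = AutoTokenizer.from_pretrained(MODEL_ID)
model = AutoModelForCausalLM.from_pretrained(
    MODEL_ID, quantization_config=quant_config, device_map="auto"
)

def predict(message, history, system_prompt, temperature, max_new_tokens,
            top_k, repetition_penalty, top_p):
    """Hypothetical generation helper; history is a list of (user, assistant) pairs."""
    messages = [{"role": "system", "content": system_prompt}]
    for user_turn, assistant_turn in history:
        messages.append({"role": "user", "content": user_turn})
        messages.append({"role": "assistant", "content": assistant_turn})
    messages.append({"role": "user", "content": message})
    input_ids = tokenizer.apply_chat_template(
        messages, add_generation_prompt=True, return_tensors="pt"
    ).to(model.device)
    # CONTEXT_LENGTH bounds the prompt; the "Max new tokens" slider bounds the completion.
    input_ids = input_ids[:, -CONTEXT_LENGTH:]
    output_ids = model.generate(
        input_ids,
        max_new_tokens=int(max_new_tokens),
        do_sample=True,
        temperature=max(float(temperature), 0.01),  # guard against temperature == 0
        top_k=int(top_k),
        top_p=top_p,
        repetition_penalty=repetition_penalty,
    )
    return tokenizer.decode(output_ids[0][input_ids.shape[-1]:], skip_special_tokens=True)

gr.ChatInterface(
    predict,
    description=DESCRIPTION,
    additional_inputs=[
        gr.Textbox(SYSTEM_MESSAGE, label="System prompt", visible=False),
        gr.Slider(0, 1, 0.7, label="Temperature"),
        gr.Slider(0, 2048, 1024, label="Max new tokens"),
        gr.Slider(1, 100, 50, label="Top K sampling"),
        gr.Slider(0, 2, 1.1, label="Repetition penalty"),
        gr.Slider(0, 1, 0.95, label="Top P sampling"),
    ],
).launch()

Note that the commented-out slider in the diff (0 to 32768, default 12000) suggests the author experimented with a much larger generation cap while keeping CONTEXT_LENGTH at 2048; the active slider settles on a 2048-token ceiling with a 1024 default.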