Update app.py
app.py CHANGED
@@ -2,15 +2,16 @@ from huggingface_hub import InferenceClient
 import gradio as gr
 
 client = InferenceClient(
-    "mistralai/Mistral-7B-Instruct-v0.
+    "mistralai/Mistral-7B-Instruct-v0.2"
 )
 
 
 def format_prompt(message, history):
     prompt = "<s>"
     for user_prompt, bot_response in history:
-        prompt += f""
-
+        prompt += f"[INST] {user_prompt} [/INST]"
+        prompt += f" {bot_response}</s> "
+    prompt += f"[INST] {message} [/INST]"
     return prompt
 
 def generate(
@@ -88,6 +89,6 @@ gr.ChatInterface(
     fn=generate,
     chatbot=gr.Chatbot(show_label=False, show_share_button=False, show_copy_button=True, layout="panel"),
     additional_inputs=additional_inputs,
-    title="Mixtral 8X7B",
+    title="Mixtral 8X7B-v0.2",
     concurrency_limit=10,
 ).launch(show_api=False)
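
For context, the updated format_prompt builds a Mistral-instruct style prompt: each user turn is wrapped in [INST] ... [/INST], each assistant reply is closed with </s>, and the current message is appended once at the end. Below is a minimal sketch (not part of the commit) assuming the final [INST] {message} [/INST] line sits outside the history loop; the history and message are made up for illustration.

# Sketch of the updated prompt builder, with a sample invocation.
def format_prompt(message, history):
    prompt = "<s>"
    for user_prompt, bot_response in history:
        prompt += f"[INST] {user_prompt} [/INST]"
        prompt += f" {bot_response}</s> "
    prompt += f"[INST] {message} [/INST]"
    return prompt

history = [("Hi", "Hello! How can I help?")]
print(format_prompt("Tell me a joke.", history))
# <s>[INST] Hi [/INST] Hello! How can I help?</s> [INST] Tell me a joke. [/INST]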