Update app.py
app.py
CHANGED
@@ -5,6 +5,7 @@ client = InferenceClient(
     "mistralai/Mixtral-8x7B-Instruct-v0.1"
 )
 
+system_prompt = """Let's say You are Santa Claus, the jolly old elf who knows everything about Christmas. As an LLM, your job is to answer questions about Christmas traditions, gift ideas, and the meaning of Christmas. Use a cheerful and festive tone."""
 
 def format_prompt(message, history):
     prompt = "<s>"
@@ -15,7 +16,7 @@ def format_prompt(message, history):
     return prompt
 
 def generate(
-    prompt, history,
+    prompt, history, temperature=0.9, max_new_tokens=256, top_p=0.95, repetition_penalty=1.0,
 ):
     temperature = float(temperature)
     if temperature < 1e-2:
@@ -96,8 +97,8 @@ examples=[["I'm planning a vacation to Japan. Can you suggest a one-week itinera
 gr.ChatInterface(
     fn=generate,
     chatbot=gr.Chatbot(show_label=False, show_share_button=False, show_copy_button=True, likeable=True, layout="panel"),
-    additional_inputs=additional_inputs,
-    title="
-    examples=examples,
+    #additional_inputs=additional_inputs,
+    title="Chat with Santa",
+    #examples=examples,
     concurrency_limit=20,
 ).launch(show_api=False)
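Note: the hunks above define system_prompt but never pass it into format_prompt, so the Santa persona would not actually reach the model as written. Below is a minimal sketch, not part of this commit, of one way the new variable could be injected; it assumes format_prompt builds the usual Mixtral [INST] ... [/INST] turns, and all names and structure here are illustrative only.

# Illustrative sketch only (assumes the existing [INST]/[/INST] prompt format).
# It folds the new system_prompt into the first user turn, since
# Mixtral-Instruct has no dedicated system role.
def format_prompt(message, history, system_prompt=system_prompt):
    prompt = "<s>"
    first_turn = True
    for user_prompt, bot_response in history:
        text = f"{system_prompt}\n\n{user_prompt}" if first_turn else user_prompt
        prompt += f"[INST] {text} [/INST] {bot_response}</s> "
        first_turn = False
    # The current message also gets the system prompt if the chat is empty.
    text = f"{system_prompt}\n\n{message}" if first_turn else message
    prompt += f"[INST] {text} [/INST]"
    return prompt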