Update app.py
app.py
CHANGED
@@ -92,7 +92,7 @@ def apply_llama3_chat_template(conversation, add_generation_prompt=True):
     prompt += "<|ASSISTANT|>\n"
     return prompt

-@spaces.GPU(
+@spaces.GPU()
 def generate_response(message, chat_history, system_prompt, temperature, max_tokens, top_p, top_k, repetition_penalty):
     # Build the conversation history.
     conversation = [{"role": "system", "content": system_prompt}]
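For context, a minimal sketch of how the corrected decorator is typically used on a ZeroGPU Space (the `spaces` package and its `duration` argument are standard ZeroGPU features; the function body below is an assumption for illustration, not taken from app.py):

import spaces

@spaces.GPU()  # attach a GPU only while this call runs; @spaces.GPU(duration=120) requests a longer allocation
def generate_response(message, chat_history, system_prompt, temperature,
                      max_tokens, top_p, top_k, repetition_penalty):
    # Build the prompt from the conversation, run generation on the attached
    # GPU, and return the decoded text (model/tokenizer assumed to be loaded
    # at module level elsewhere in app.py).
    ...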