Spaces:
Runtime error
Runtime error
michailroussos
committed on
Commit
·
37be440
1
Parent(s):
9202d9a
more
Browse files
app.py
CHANGED
@@ -22,7 +22,7 @@ print("Model loaded successfully!")
|
|
22 |
# Gradio Response Function
|
23 |
from transformers import TextStreamer
|
24 |
|
25 |
-
def respond(message, max_new_tokens, temperature, system_message=""):
|
26 |
try:
|
27 |
# Prepare input messages
|
28 |
messages = [{"role": "system", "content": system_message}] if system_message else []
|
@@ -52,10 +52,13 @@ def respond(message, max_new_tokens, temperature, system_message=""):
|
|
52 |
# Decode the generated tokens back to text
|
53 |
generated_text = tokenizer.decode(output[0], skip_special_tokens=True)
|
54 |
|
55 |
-
#
|
56 |
-
|
57 |
|
58 |
-
|
|
|
|
|
|
|
59 |
|
60 |
except Exception as e:
|
61 |
# Debug: Log errors
|
|
|
22 |
# Gradio Response Function
|
23 |
from transformers import TextStreamer
|
24 |
|
25 |
+
def respond(message, max_new_tokens, temperature, system_message="You are a helpful assistant. You should reply to the user's message without repeating the input."):
|
26 |
try:
|
27 |
# Prepare input messages
|
28 |
messages = [{"role": "system", "content": system_message}] if system_message else []
|
|
|
52 |
# Decode the generated tokens back to text
|
53 |
generated_text = tokenizer.decode(output[0], skip_special_tokens=True)
|
54 |
|
55 |
+
# Clean up the response by removing unwanted parts (e.g., system and user info)
|
56 |
+
cleaned_response = generated_text.split("\n")[-1] # Assuming the response ends at the last line
|
57 |
|
58 |
+
# Debug: Show the cleaned response
|
59 |
+
print("[DEBUG] Cleaned Response:", cleaned_response)
|
60 |
+
|
61 |
+
return cleaned_response
|
62 |
|
63 |
except Exception as e:
|
64 |
# Debug: Log errors
|