Spaces:
Runtime error
Runtime error
michailroussos
committed on
Commit
·
15bfa4e
1
Parent(s):
5830d67
more
Browse files
app.py
CHANGED
|
@@ -46,7 +46,7 @@ def respond(message, max_new_tokens, temperature, system_message=""):
|
|
| 46 |
|
| 47 |
# Stream response
|
| 48 |
text_streamer = TextStreamer(tokenizer, skip_prompt=True)
|
| 49 |
-
|
| 50 |
input_ids=input_ids,
|
| 51 |
max_new_tokens=max_new_tokens,
|
| 52 |
temperature=temperature,
|
|
@@ -54,7 +54,14 @@ def respond(message, max_new_tokens, temperature, system_message=""):
|
|
| 54 |
streamer=text_streamer,
|
| 55 |
)
|
| 56 |
|
| 57 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 58 |
except Exception as e:
|
| 59 |
# Debug: Log errors
|
| 60 |
print("[ERROR]", str(e))
|
|
|
|
| 46 |
|
| 47 |
# Stream response
|
| 48 |
text_streamer = TextStreamer(tokenizer, skip_prompt=True)
|
| 49 |
+
model.generate(
|
| 50 |
input_ids=input_ids,
|
| 51 |
max_new_tokens=max_new_tokens,
|
| 52 |
temperature=temperature,
|
|
|
|
| 54 |
streamer=text_streamer,
|
| 55 |
)
|
| 56 |
|
| 57 |
+
# Get the response generated by the model
|
| 58 |
+
# Retrieve text from the output stream (assuming this works with your setup)
|
| 59 |
+
generated_text = text_streamer.generated_text # This assumes the `TextStreamer` accumulates the generated text
|
| 60 |
+
|
| 61 |
+
# Debug: Show the response text
|
| 62 |
+
print("[DEBUG] Generated Text:", generated_text)
|
| 63 |
+
|
| 64 |
+
return generated_text
|
| 65 |
except Exception as e:
|
| 66 |
# Debug: Log errors
|
| 67 |
print("[ERROR]", str(e))
|