Update app.py
app.py CHANGED
@@ -278,28 +278,29 @@ def simple_chat(message: dict, temperature: float = 0.8, max_length: int = 4096,
 
         gen_kwargs = {**input_ids, **generate_kwargs}
 
-        # Define the function to run generation
-        def generate_text():
-            with torch.no_grad():
-                model.generate(**gen_kwargs, streamer=streamer)
 
-
-
-
-
-
+        with torch.no_grad():
+            thread = Thread(target=model.generate, kwargs=gen_kwargs)
+            thread.start()
+            print("--------------")
+            print(" ")
+            print(thread)
+            print(" ")
+            print("--------------")
         buffer = ""
         for new_text in streamer:
             buffer += new_text
             yield new_text
             print("--------------")
+            print(new_text)
             print("Buffer: ")
             print(" ")
             print(buffer)
             print(" ")
             print("--------------")
 
-
+
+        return StreamingResponse(new_text), media_type="text/plain")
 
     except Exception as e:
         return PlainTextResponse(f"Error: {str(e)}")
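Editorial note on the change: the new code is reaching for the usual transformers streaming pattern, where model.generate runs in a background Thread and pushes tokens into a TextIteratorStreamer that the request handler drains. Two details in the hunk look off. First, torch.no_grad() is thread-local (and generate disables gradients internally anyway), so wrapping thread.start() in it does not affect the worker thread. Second, the added line return StreamingResponse(new_text), media_type="text/plain") has unbalanced parentheses, and even with the syntax fixed it would wrap only the last yielded chunk rather than the whole stream. Below is a minimal sketch of the intended pattern, assuming a FastAPI endpoint; the /chat route, the message["text"] key, and the gpt2 model id are placeholders for illustration, not taken from this Space.

# Minimal sketch of the threaded streaming pattern (assumed setup, not the Space's exact code).
from threading import Thread

from fastapi import FastAPI
from fastapi.responses import PlainTextResponse, StreamingResponse
from transformers import AutoModelForCausalLM, AutoTokenizer, TextIteratorStreamer

app = FastAPI()
model_name = "gpt2"  # placeholder; the Space's real model is not visible in this hunk
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForCausalLM.from_pretrained(model_name)

@app.post("/chat")  # hypothetical route, for illustration only
def chat(message: dict):
    try:
        inputs = tokenizer(message["text"], return_tensors="pt").to(model.device)
        streamer = TextIteratorStreamer(tokenizer, skip_prompt=True, skip_special_tokens=True)
        gen_kwargs = {**inputs, "streamer": streamer, "max_new_tokens": 4096}

        # generate() runs in a worker thread and feeds tokens into the streamer;
        # generate() disables gradients itself, so no torch.no_grad() wrapper is needed here.
        thread = Thread(target=model.generate, kwargs=gen_kwargs)
        thread.start()

        def token_stream():
            # Drain the streamer chunk by chunk; keeping the yield loop in a
            # separate generator lets the endpoint return a response object.
            for new_text in streamer:
                yield new_text
            thread.join()

        # Wrap the generator itself, not the last yielded chunk.
        return StreamingResponse(token_stream(), media_type="text/plain")
    except Exception as e:
        return PlainTextResponse(f"Error: {str(e)}")

Because the handler in the hunk both yields and returns, Python treats it as a generator function, so its return value would never reach the client as a response; moving the yield loop into an inner generator, as in the sketch, avoids that.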