mateoluksenberg committed on
Commit
30a65a8
·
verified ·
1 Parent(s): bf31139

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +37 -11
app.py CHANGED
@@ -258,18 +258,44 @@ def simple_chat(message, temperature: float = 0.8, max_length: int = 4096, top_p
258
  )
259
  gen_kwargs = {**input_ids, **generate_kwargs}
260
 
261
- with torch.no_grad():
262
- thread = Thread(target=model.generate, kwargs=gen_kwargs)
263
- thread.start()
264
- buffer = ""
265
- for new_text in streamer:
266
- buffer += new_text
267
- yield buffer
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
268
 
269
- print("---------")
270
- print("Text: ")
271
- print(buffer)
272
- print("---------")
 
 
 
273
 
274
 
275
 
 
258
  )
259
  gen_kwargs = {**input_ids, **generate_kwargs}
260
 
261
+ buffer = ""
262
+
263
+ def generate_text():
264
+ nonlocal buffer
265
+ with torch.no_grad():
266
+ thread = Thread(target=model.generate, kwargs=gen_kwargs)
267
+ thread.start()
268
+ for new_text in streamer:
269
+ buffer += new_text
270
+
271
+ # Start the generation in a separate thread
272
+ generate_text()
273
+
274
+ # Wait for the generation to finish
275
+ thread.join()
276
+
277
+ print("---------")
278
+ print("Text: ")
279
+ print(buffer)
280
+ print("---------")
281
+
282
+ return PlainTextResponse(buffer)
283
+
284
+ # with torch.no_grad():
285
+ # thread = Thread(target=model.generate, kwargs=gen_kwargs)
286
+ # thread.start()
287
+ # buffer = ""
288
+ # for new_text in streamer:
289
+ # buffer += new_text
290
+ # yield buffer
291
 
292
+ # print("---------")
293
+ # print("Text: ")
294
+ # print(buffer)
295
+ # print("---------")
296
+
297
+ # return PlainTextResponse(buffer)
298
+
299
 
300
 
301