mateoluksenberg committed on
Commit
b633491
·
verified ·
1 Parent(s): 606bcfd

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +58 -63
app.py CHANGED
@@ -226,16 +226,13 @@ def simple_chat(message: dict, temperature: float = 0.8, max_length: int = 4096,
226
 
227
  conversation = []
228
 
229
- # Acceder al contenido del archivo y al nombre del archivo
230
  if "file_content" in message and message["file_content"]:
231
  file_content = message["file_content"]
232
  file_name = message["file_name"]
233
 
234
- # Guardar el archivo en un archivo temporal
235
  with open(file_name, "wb") as f:
236
  f.write(file_content.read())
237
 
238
- # Llamar a `mode_load` con el nombre del archivo
239
  choice, contents = mode_load(file_name)
240
 
241
  if choice == "image":
@@ -244,28 +241,12 @@ def simple_chat(message: dict, temperature: float = 0.8, max_length: int = 4096,
244
  format_msg = contents + "\n\n\n" + "{} files uploaded.\n".format(1) + message['text']
245
  conversation.append({"role": "user", "content": format_msg})
246
  else:
247
- # Manejar caso donde no se sube archivo
248
  conversation.append({"role": "user", "content": message['text']})
249
 
250
- print("--------------")
251
- print(" ")
252
- print(conversation)
253
- print(" ")
254
- print("--------------")
255
-
256
  input_ids = tokenizer.apply_chat_template(conversation, tokenize=True, add_generation_prompt=True, return_tensors="pt", return_dict=True).to(model.device)
257
 
258
  streamer = TextIteratorStreamer(tokenizer, timeout=60.0, skip_prompt=True, skip_special_tokens=True)
259
 
260
-
261
- print("--------------")
262
- print(" ")
263
- print(input_ids)
264
- print("--------------")
265
- print(streamer)
266
- print(" ")
267
- print("--------------")
268
-
269
  generate_kwargs = dict(
270
  max_length=max_length,
271
  do_sample=True,
@@ -278,32 +259,46 @@ def simple_chat(message: dict, temperature: float = 0.8, max_length: int = 4096,
278
 
279
  gen_kwargs = {**input_ids, **generate_kwargs}
280
 
281
-
282
  with torch.no_grad():
283
  thread = Thread(target=model.generate, kwargs=gen_kwargs)
284
  thread.start()
285
- print("--------------")
286
- print(" ")
287
- print(thread)
288
- print(" ")
289
- print("--------------")
290
  buffer = ""
291
  for new_text in streamer:
292
  buffer += new_text
293
- yield new_text
294
- print("--------------")
295
- print(new_text)
296
- print("Buffer: ")
297
- print(" ")
298
- print(buffer)
299
- print(" ")
300
- print("--------------")
301
 
302
-
303
- return StreamingResponse(new_text, media_type="text/plain")
304
 
305
  except Exception as e:
306
- return PlainTextResponse(f"Error: {str(e)}")
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
307
 
308
  # def simple_chat(message: dict, temperature: float = 0.8, max_length: int = 4096, top_p: float = 1, top_k: int = 10, penalty: float = 1.0):
309
  # try:
@@ -368,36 +363,36 @@ def simple_chat(message: dict, temperature: float = 0.8, max_length: int = 4096,
368
 
369
 
370
 
371
- @app.post("/chat/")
372
- async def test_endpoint(
373
- text: str = Form(...),
374
- file: UploadFile = File(None)
375
- ):
376
- # Verificar si se ha subido un archivo
377
- if file:
378
- # Leer el archivo en memoria
379
- file_content = BytesIO(await file.read())
380
- file_name = file.filename
381
 
382
- # Construir el mensaje con el archivo y el texto
383
- message = {
384
- "text": text,
385
- "file_content": file_content,
386
- "file_name": file_name
387
- }
388
- else:
389
- # Si no se sube archivo, solo se incluye el texto
390
- message = {
391
- "text": text,
392
- "file_content": None,
393
- "file_name": None
394
- }
395
 
396
- # Llamar a la función `simple_chat` con el mensaje
397
- print(message)
398
- response = simple_chat(message)
399
 
400
- return response
401
 
402
 
403
  with gr.Blocks(css=CSS, theme="soft", fill_height=True) as demo:
 
226
 
227
  conversation = []
228
 
 
229
  if "file_content" in message and message["file_content"]:
230
  file_content = message["file_content"]
231
  file_name = message["file_name"]
232
 
 
233
  with open(file_name, "wb") as f:
234
  f.write(file_content.read())
235
 
 
236
  choice, contents = mode_load(file_name)
237
 
238
  if choice == "image":
 
241
  format_msg = contents + "\n\n\n" + "{} files uploaded.\n".format(1) + message['text']
242
  conversation.append({"role": "user", "content": format_msg})
243
  else:
 
244
  conversation.append({"role": "user", "content": message['text']})
245
 
 
 
 
 
 
 
246
  input_ids = tokenizer.apply_chat_template(conversation, tokenize=True, add_generation_prompt=True, return_tensors="pt", return_dict=True).to(model.device)
247
 
248
  streamer = TextIteratorStreamer(tokenizer, timeout=60.0, skip_prompt=True, skip_special_tokens=True)
249
 
 
 
 
 
 
 
 
 
 
250
  generate_kwargs = dict(
251
  max_length=max_length,
252
  do_sample=True,
 
259
 
260
  gen_kwargs = {**input_ids, **generate_kwargs}
261
 
 
262
  with torch.no_grad():
263
  thread = Thread(target=model.generate, kwargs=gen_kwargs)
264
  thread.start()
 
 
 
 
 
265
  buffer = ""
266
  for new_text in streamer:
267
  buffer += new_text
 
 
 
 
 
 
 
 
268
 
269
+ return buffer
 
270
 
271
  except Exception as e:
272
+ return f"Error: {str(e)}"
273
+
274
+ @app.post("/chat/")
275
+ async def test_endpoint(
276
+ text: str = Form(...),
277
+ file: UploadFile = File(None)
278
+ ):
279
+ if file:
280
+ file_content = BytesIO(await file.read())
281
+ file_name = file.filename
282
+
283
+ message = {
284
+ "text": text,
285
+ "file_content": file_content,
286
+ "file_name": file_name
287
+ }
288
+ else:
289
+ message = {
290
+ "text": text,
291
+ "file_content": None,
292
+ "file_name": None
293
+ }
294
+
295
+ print(message)
296
+ response = simple_chat(message)
297
+
298
+ if isinstance(response, str) and response.startswith("Error:"):
299
+ return PlainTextResponse(response, status_code=500)
300
+ else:
301
+ return StreamingResponse(BytesIO(response.encode()), media_type="text/plain")
302
 
303
  # def simple_chat(message: dict, temperature: float = 0.8, max_length: int = 4096, top_p: float = 1, top_k: int = 10, penalty: float = 1.0):
304
  # try:
 
363
 
364
 
365
 
366
+ # @app.post("/chat/")
367
+ # async def test_endpoint(
368
+ # text: str = Form(...),
369
+ # file: UploadFile = File(None)
370
+ # ):
371
+ # # Verificar si se ha subido un archivo
372
+ # if file:
373
+ # # Leer el archivo en memoria
374
+ # file_content = BytesIO(await file.read())
375
+ # file_name = file.filename
376
 
377
+ # # Construir el mensaje con el archivo y el texto
378
+ # message = {
379
+ # "text": text,
380
+ # "file_content": file_content,
381
+ # "file_name": file_name
382
+ # }
383
+ # else:
384
+ # # Si no se sube archivo, solo se incluye el texto
385
+ # message = {
386
+ # "text": text,
387
+ # "file_content": None,
388
+ # "file_name": None
389
+ # }
390
 
391
+ # # Llamar a la función `simple_chat` con el mensaje
392
+ # print(message)
393
+ # response = simple_chat(message)
394
 
395
+ # return response
396
 
397
 
398
  with gr.Blocks(css=CSS, theme="soft", fill_height=True) as demo: