JeCabrera committed on
Commit
8acb879
verified
1 Parent(s): ae628c2

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +19 -9
app.py CHANGED
@@ -16,9 +16,10 @@ genai.configure(api_key=GOOGLE_API_KEY)
16
  def transform_history(history):
17
  """Transforma el historial en el formato esperado por Gemini."""
18
  new_history = []
19
- for chat in history:
20
- new_history.append({"role": "user", "content": chat[0]})
21
- new_history.append({"role": "assistant", "content": chat[1]})
 
22
  return new_history
23
 
24
  def bot(files: Optional[List[str]], model_choice: str, system_instruction: Optional[str], history):
@@ -36,20 +37,27 @@ def bot(files: Optional[List[str]], model_choice: str, system_instruction: Optio
36
  top_p=0.9
37
  )
38
 
 
 
 
 
 
 
 
39
  response = genai.ChatCompletion.create(
40
  model=model_choice,
41
- messages=chat_history + [{"role": "user", "content": history[-1][0]}],
42
  generation_config=generation_config
43
  )
44
 
45
  reply = response['candidates'][0]['content']
46
  for i in range(len(reply)):
47
  time.sleep(0.05)
48
- yield history + [{"role": "assistant", "content": reply[:i + 1]}]
49
 
50
  # Interfaz con Gradio
51
  with gr.Blocks() as demo:
52
- chatbot = gr.Chatbot(elem_id="chatbot", bubble_full_width=False, type="messages")
53
 
54
  chat_input = gr.Textbox(
55
  placeholder="Escribe un mensaje...",
@@ -59,11 +67,13 @@ with gr.Blocks() as demo:
59
  submit_btn = gr.Button("Enviar")
60
  system_input = gr.Textbox(placeholder="Instrucción del sistema (opcional)", show_label=True, lines=2)
61
  model_choice = gr.Dropdown(choices=["gemini-1.5-flash"], value="gemini-1.5-flash", label="Modelo")
 
62
 
 
63
  submit_btn.click(
64
- bot, # Función que manejará la acción del botón
65
- inputs=[chat_input, model_choice, system_input, chatbot], # Asegúrate de que las entradas sean válidas
66
- outputs=chatbot # El chatbot será actualizado con la respuesta
67
  )
68
 
69
  demo.launch()
 
16
  def transform_history(history):
17
  """Transforma el historial en el formato esperado por Gemini."""
18
  new_history = []
19
+ for user, assistant in history:
20
+ new_history.append({"role": "user", "content": user})
21
+ if assistant:
22
+ new_history.append({"role": "assistant", "content": assistant})
23
  return new_history
24
 
25
  def bot(files: Optional[List[str]], model_choice: str, system_instruction: Optional[str], history):
 
37
  top_p=0.9
38
  )
39
 
40
+ # Si se incluyen archivos, procesa esa entrada multimodal
41
+ if files:
42
+ for file_path in files:
43
+ with open(file_path, "r") as file:
44
+ file_content = file.read()
45
+ chat_history.append({"role": "user", "content": f"Archivo cargado: {file_content}"})
46
+
47
  response = genai.ChatCompletion.create(
48
  model=model_choice,
49
+ messages=chat_history,
50
  generation_config=generation_config
51
  )
52
 
53
  reply = response['candidates'][0]['content']
54
  for i in range(len(reply)):
55
  time.sleep(0.05)
56
+ yield history + [[None, reply[:i + 1]]] # Agrega la respuesta progresivamente al historial
57
 
58
  # Interfaz con Gradio
59
  with gr.Blocks() as demo:
60
+ chatbot = gr.Chatbot(elem_id="chatbot", bubble_full_width=False, type="interactive")
61
 
62
  chat_input = gr.Textbox(
63
  placeholder="Escribe un mensaje...",
 
67
  submit_btn = gr.Button("Enviar")
68
  system_input = gr.Textbox(placeholder="Instrucción del sistema (opcional)", show_label=True, lines=2)
69
  model_choice = gr.Dropdown(choices=["gemini-1.5-flash"], value="gemini-1.5-flash", label="Modelo")
70
+ file_input = gr.File(label="Subir archivo (opcional)", file_types=[".txt", ".md", ".json"])
71
 
72
+ # Manejar el envío del mensaje
73
  submit_btn.click(
74
+ bot,
75
+ inputs=[file_input, model_choice, system_input, chatbot],
76
+ outputs=chatbot
77
  )
78
 
79
  demo.launch()