JeCabrera committed on
Commit: f9815f3 (verified)
1 Parent(s): 8cb552a

Update app.py

Files changed (1)
  1. app.py +41 -26
app.py CHANGED
@@ -26,9 +26,6 @@ IMAGE_CACHE_DIRECTORY = "/tmp"
 IMAGE_WIDTH = 512
 CHAT_HISTORY = List[Tuple[Optional[Union[Tuple[str], str]], Optional[str]]]
 
-# Configure the Gemini API
-genai.configure(api_key=GOOGLE_API_KEY)
-
 # Function to transform the chat history
 def transform_history(history: CHAT_HISTORY):
     """
@@ -43,27 +40,23 @@ def transform_history(history: CHAT_HISTORY):
     return transformed
 
 # Response generation function
-def response(
-    message: str, history: CHAT_HISTORY, model_choice: str, system_instruction: str
-) -> str:
+def response(message: str, history: CHAT_HISTORY, model: genai.GenerativeModel):
     """
     Generates a response based on the chat history and the user's message.
     """
-    generation_config = genai.types.GenerationConfig(
-        temperature=0.7,
-        max_output_tokens=8192,
-        top_k=10,
-        top_p=0.9
+    # Build the model input from the history and the user's message
+    input_text = "\n".join(
+        [
+            f"User: {item[0]}" if item[0] else f"Bot: {item[1]}"
+            for item in history
+        ]
     )
-
-    model = genai.GenerativeModel(
-        model_name=model_choice,
-        generation_config=generation_config,
-        system_instruction=system_instruction,
-    )
-
-    transformed_history = transform_history(history)
-    model_response = model.chat(messages=transformed_history + [{"role": "user", "content": message}])
+    input_text += f"\nUser: {message}"
+
+    # Generate the response
+    model_response = model.generate(input_text)
+
+    # Return the generated response
     return model_response.text
 
 # Image preprocessing
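The rewritten `response` no longer builds its own model or calls a chat API; it receives a ready model and flattens the history into a plain `User:`/`Bot:` transcript before a single generate call. A minimal sketch of that flattening, applied directly to the tuple-based CHAT_HISTORY format for illustration, with a hypothetical `FakeModel` standing in for the real Gemini model (nothing below is code from app.py):

from typing import List, Optional, Tuple, Union

CHAT_HISTORY = List[Tuple[Optional[Union[Tuple[str], str]], Optional[str]]]

class FakeReply:
    # Stub of the reply object: response() only reads .text.
    def __init__(self, text: str) -> None:
        self.text = text

class FakeModel:
    # Hypothetical stand-in exposing the same generate(input_text) call used above.
    def generate(self, input_text: str) -> FakeReply:
        return FakeReply("[stub] " + input_text.splitlines()[-1])

def flatten(history: CHAT_HISTORY, message: str) -> str:
    # Same joining rule as the new response(): one line per history entry,
    # "User:" when the first element is set, otherwise "Bot:".
    input_text = "\n".join(
        f"User: {item[0]}" if item[0] else f"Bot: {item[1]}" for item in history
    )
    return input_text + f"\nUser: {message}"

history: CHAT_HISTORY = [("Hola", "Hi there!"), (None, "Anything else?")]
prompt = flatten(history, "Tell me a joke")
print(prompt)                             # User: Hola / Bot: Anything else? / User: Tell me a joke
print(FakeModel().generate(prompt).text)  # [stub] User: Tell me a joke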
@@ -83,7 +76,7 @@ def cache_pil_image(image: Image.Image) -> str:
 # Upload images
 def upload(files: Optional[List[str]], chatbot: CHAT_HISTORY) -> CHAT_HISTORY:
     for file in files:
-        image = Image.open(file).convert("RGB")
+        image = Image.open(file).convert('RGB')
         image_preview = preprocess_image(image)
         if image_preview:
             gr.Image(image_preview).render()
@@ -102,10 +95,32 @@ def bot(
     files: Optional[List[str]],
     model_choice: str,
     system_instruction: str,
-    chatbot: CHAT_HISTORY,
+    chatbot: CHAT_HISTORY
 ):
+    if not GOOGLE_API_KEY:
+        raise ValueError("GOOGLE_API_KEY is not set.")
+
+    # Configure the API with the key
+    genai.configure(api_key=GOOGLE_API_KEY)
+    generation_config = genai.types.GenerationConfig(
+        temperature=0.7,
+        max_output_tokens=8192,
+        top_k=10,
+        top_p=0.9
+    )
+
     text_prompt = chatbot[-1][0] if chatbot and chatbot[-1][0] and isinstance(chatbot[-1][0], str) else ""
-    bot_reply = response(text_prompt, chatbot, model_choice, system_instruction)
+    transformed_history = transform_history(chatbot)
+
+    # Create the model with the system instruction
+    model = genai.GenerativeModel(
+        model_name=model_choice,
+        generation_config=generation_config,
+        system_instruction=system_instruction
+    )
+
+    # Generate the reply using the `response` function
+    bot_reply = response(text_prompt, transformed_history, model)
     chatbot[-1] = (text_prompt, bot_reply)
     return chatbot
 
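All the per-request setup now lives in `bot()`: the key check, SDK configuration, GenerationConfig, model construction, and only then the call to `response`. The `text_prompt` line is unchanged; its guard skips history entries whose first element is an image tuple rather than text. A small self-contained sketch of that guard on a mixed history (the file path below is made up for illustration):

from typing import List, Optional, Tuple, Union

CHAT_HISTORY = List[Tuple[Optional[Union[Tuple[str], str]], Optional[str]]]

def last_text_prompt(chatbot: CHAT_HISTORY) -> str:
    # Same guard as in bot(): only use the last entry if it is a non-empty string,
    # so image entries like (("/tmp/cat.png",), None) fall back to "".
    return (
        chatbot[-1][0]
        if chatbot and chatbot[-1][0] and isinstance(chatbot[-1][0], str)
        else ""
    )

print(last_text_prompt([("Describe this", None)]))    # "Describe this"
print(last_text_prompt([(("/tmp/cat.png",), None)]))  # "" (image upload, no text)
print(last_text_prompt([]))                           # "" (empty history)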
@@ -114,7 +129,7 @@ system_instruction_component = gr.Textbox(
     placeholder="Enter system instruction...", show_label=True, scale=8
 )
 chatbot_component = gr.Chatbot(
-    label="Gemini",
+    label='Gemini',
     bubble_full_width=False,
     scale=2,
     height=300
@@ -135,14 +150,14 @@ model_choice_component = gr.Dropdown(
 
 user_inputs = [
     text_prompt_component,
-    chatbot_component,
+    chatbot_component
 ]
 
 bot_inputs = [
     upload_button_component,
     model_choice_component,
     system_instruction_component,
-    chatbot_component,
+    chatbot_component
 ]
 
 # User interface
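`chatbot_component` is part of both input lists, so the same chat state flows into the user-side handler and into `bot()`. The event wiring itself is outside this diff; a hypothetical sketch of how such lists are typically chained in Gradio, assuming the components and lists defined above and a `user` handler that is not part of app.py:

import gradio as gr

def user(text_prompt: str, chatbot):
    # Hypothetical user-side handler: append the new message with no reply yet.
    chatbot.append((text_prompt, None))
    return "", chatbot

with gr.Blocks() as demo:
    # Render the components created above inside the layout.
    chatbot_component.render()
    text_prompt_component.render()
    upload_button_component.render()
    model_choice_component.render()
    system_instruction_component.render()

    # user_inputs feed the user handler, bot_inputs feed bot(); both update the chatbot.
    text_prompt_component.submit(
        user, user_inputs, [text_prompt_component, chatbot_component]
    ).then(bot, bot_inputs, chatbot_component)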
 