Update app.py
app.py CHANGED
@@ -3,19 +3,14 @@ import gradio as gr
 import google.generativeai as genai
 import os

-# Load
+# Load the API key from environment variables
 GOOGLE_API_KEY = os.getenv("GOOGLE_API_KEY")

 if not GOOGLE_API_KEY:
     raise ValueError("GOOGLE_API_KEY is not set in environment variables.")

-# Configure the Gemini API
 genai.configure(api_key=GOOGLE_API_KEY)

-# Initialize the Gemini chat
-chat = genai.Chat()
-
-# Transform the Gradio history into a Gemini-compatible format
 def transform_history(history):
     new_history = []
     for chat in history:
@@ -23,21 +18,22 @@ def transform_history(history):
         new_history.append({"parts": [{"text": chat[1]}], "role": "model"})
     return new_history

-# Function to generate a response with Gemini
 def response(message, history):
-
-
-
-
-
-
-
-
-
+    chat_history = transform_history(history)
+
+    response = genai.ChatCompletion.create(
+        model="gemini-1.5-flash",  # make sure the correct model is used
+        messages=chat_history + [{"role": "user", "content": message}],
+        max_tokens=150,
+    )
+
+    reply = response['choices'][0]['message']['content']
+
+    # Show the response character by character
+    for i in range(len(reply)):
         time.sleep(0.05)
-        yield
+        yield reply[:i + 1]

-# Gradio user interface
 gr.ChatInterface(response,
     title='Gemini Chat',
     textbox=gr.Textbox(placeholder="Pregunta a Gemini"),
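For reference, the `google.generativeai` package does not expose a `ChatCompletion.create` call; its chat interface goes through `genai.GenerativeModel` and `start_chat`. Below is a minimal sketch of how the same streaming `response` generator could be built on that API. Only the `gemini-1.5-flash` model name, the history format, and the 0.05 s per-character delay come from the diff; the rest (the `model` object, `start_chat`/`send_message` wiring, `.launch()`) is an assumed rewrite, not the Space's actual code.

import os
import time

import google.generativeai as genai
import gradio as gr

genai.configure(api_key=os.getenv("GOOGLE_API_KEY"))

# Hypothetical model handle; "gemini-1.5-flash" matches the diff.
model = genai.GenerativeModel("gemini-1.5-flash")


def transform_history(history):
    # Same idea as app.py: map Gradio's [user, bot] pairs to Gemini roles.
    new_history = []
    for user_msg, bot_msg in history:
        new_history.append({"role": "user", "parts": [{"text": user_msg}]})
        new_history.append({"role": "model", "parts": [{"text": bot_msg}]})
    return new_history


def response(message, history):
    # Start a chat seeded with the transformed history, ask once,
    # then stream the reply back to Gradio character by character.
    chat = model.start_chat(history=transform_history(history))
    reply = chat.send_message(message).text
    for i in range(len(reply)):
        time.sleep(0.05)
        yield reply[: i + 1]


gr.ChatInterface(
    response,
    title="Gemini Chat",
    textbox=gr.Textbox(placeholder="Pregunta a Gemini"),
).launch()

In this sketch `start_chat` keeps the transformed history on the Gemini side, so the generator only has to send the new user message and stream the returned text back to `gr.ChatInterface`.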