import os

import google.generativeai as genai
import gradio as gr

# Configure the Gemini API with your API key
genai.configure(api_key=os.environ["API_KEY"])

# Initialize the Gemini generative model
model = genai.GenerativeModel("gemini-1.5-flash")

def chat_with_gemini(user_input, history):
    """
    Generates a response from the Gemini API for the latest user message.

    Note: only the latest message is sent to the model; the history list is
    used to keep the Gradio chat display up to date.

    Args:
        user_input (str): The latest message from the user.
        history (list): The conversation history as a list of (user, bot) tuples.

    Returns:
        tuple: The updated history twice, once for the Chatbot display and
        once for the session state.
    """
    try:
        # Send the latest message to the Gemini API
        response = model.generate_content(
            user_input,
            generation_config=genai.GenerationConfig(
                max_output_tokens=150,
                temperature=0.7
            )
        )
        chatbot_reply = response.text.strip()
        # Append the user input and chatbot reply to the history
        history.append((user_input, chatbot_reply))
        return history, history
    except Exception as e:
        error_message = f"An error occurred: {e}"
        history.append((user_input, error_message))
        return history, history
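
# Optional, a hedged sketch that is not wired into the UI below: the function
# above sends only the latest message, so Gemini has no memory of earlier turns.
# If multi-turn context is wanted, the google-generativeai chat interface can
# carry the history. The helper name and the (user, bot) tuple history format
# are assumptions here, not part of the original app.
def chat_with_gemini_using_history(user_input, history):
    # Rebuild prior turns in the role/parts format the chat API expects
    past_turns = []
    for user_msg, bot_msg in history:
        past_turns.append({"role": "user", "parts": [user_msg]})
        past_turns.append({"role": "model", "parts": [bot_msg]})
    chat = model.start_chat(history=past_turns)
    response = chat.send_message(user_input)
    history.append((user_input, response.text.strip()))
    return history, history
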
with gr.Blocks() as iface:
    gr.Markdown("# 🗣️ Google Gemini Chatbot")
    chatbot = gr.Chatbot()

    with gr.Row():
        # Integer scale values (recent Gradio warns on floats); 85/15 keeps the proportions
        with gr.Column(scale=85):
            user_input = gr.Textbox(
                placeholder="Type your message here...",
                show_label=False
            )
        with gr.Column(scale=15):
            send_button = gr.Button("Send")

    # State to hold the conversation history
    history = gr.State([])

    def respond(message, history_state):
        """
        Handles the user message, generates a response, and updates the conversation history.

        Args:
            message (str): The user's message.
            history_state (list): The current conversation history.

        Returns:
            tuple: Updated history for the Chatbot display and the session state.
        """
        updated_history, new_history = chat_with_gemini(message, history_state)
        return updated_history, new_history

    # Wire both the Send button and pressing Enter in the textbox to the handler
    send_button.click(respond, inputs=[user_input, history], outputs=[chatbot, history])
    user_input.submit(respond, inputs=[user_input, history], outputs=[chatbot, history])
if __name__ == "__main__":
    iface.launch()
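
# To run this Space locally, a minimal sketch (assumes the file is saved as
# app.py and that you have a Gemini API key):
#
#   pip install gradio google-generativeai
#   export API_KEY="your-gemini-api-key"
#   python app.py
#
# Gradio prints a local URL (http://127.0.0.1:7860 by default) once the app starts.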