import os

import google.generativeai as genai
import gradio as gr

# Configure the Gemini API with the key from the environment.
# A missing API_KEY fails fast here with a KeyError at startup.
genai.configure(api_key=os.environ["API_KEY"])

# One model instance is created at import time and reused for every request.
model = genai.GenerativeModel("gemini-1.5-flash")


def chat_with_gemini(user_input, history):
    """Generate a Gemini reply and append the exchange to the history.

    Args:
        user_input (str): The latest message from the user.
        history (list): Conversation history as (user, bot) tuples.
            Mutated in place.

    Returns:
        tuple: ``(history, history)`` — the same list twice, so a single
        call can feed both the Chatbot display and the State component.
    """
    try:
        # Keep the try body minimal: only the API call and text extraction
        # can raise; the append/return path is shared with the error case.
        response = model.generate_content(
            user_input,
            generation_config=genai.GenerationConfig(
                max_output_tokens=150,
                temperature=0.7,
            ),
        )
        chatbot_reply = response.text.strip()
    except Exception as e:
        # Surface API/network failures inside the chat instead of crashing
        # the UI callback.
        chatbot_reply = f"An error occurred: {e}"
    history.append((user_input, chatbot_reply))
    return history, history


with gr.Blocks() as iface:
    gr.Markdown("# 🗣️ Google Gemini Chatbot")
    chatbot = gr.Chatbot()
    with gr.Row():
        # BUG FIX: Column ``scale`` must be an integer (Gradio 4.x rejects
        # floats); 9:1 matches the intended 0.85/0.15 proportions.
        with gr.Column(scale=9):
            user_input = gr.Textbox(
                placeholder="Type your message here...",
                show_label=False,
            )
        with gr.Column(scale=1):
            send_button = gr.Button("Send")

    # Per-session conversation history, threaded through every callback.
    history = gr.State([])

    def respond(message, history_state):
        """Route a user message through Gemini and clear the input box.

        Args:
            message (str): The user's message.
            history_state (list): Current conversation history.

        Returns:
            tuple: (chatbot display history, updated state, "") — the
            trailing empty string clears the textbox after sending.
        """
        updated_history, new_history = chat_with_gemini(message, history_state)
        # BUG FIX: also return "" so the input box is emptied after each send.
        return updated_history, new_history, ""

    send_button.click(
        respond,
        inputs=[user_input, history],
        outputs=[chatbot, history, user_input],
    )
    user_input.submit(
        respond,
        inputs=[user_input, history],
        outputs=[chatbot, history, user_input],
    )

if __name__ == "__main__":
    iface.launch()