import gradio as gr
import google.generativeai as genai

# Set your API key
genai.configure(api_key='YOUR_API_KEY')

# Initialize the model
model = genai.GenerativeModel('gemini-1.5-flash')

def respond(user_message, history, chat_state):
    if chat_state is None:
        # Start a new chat session, seeded with the greeting shown in the UI
        chat_state = model.start_chat(
            history=[
                {"role": "user", "parts": ["Hello"]},
                {"role": "model", "parts": ["Great to meet you. What would you like to know?"]},
            ]
        )
        # Initialize the displayed history if it's empty
        if not history:
            history = [["Hello", "Great to meet you. What would you like to know?"]]
    # Send the user's message and get the model's reply
    response = chat_state.send_message(user_message)
    # Append the user's message and the model's response to the history
    history.append([user_message, response.text])
    return history, chat_state, ''

with gr.Blocks() as demo:
    gr.Markdown("<h1 align='center'>Gemini 1.5 Flash Chatbot Demo</h1>")
    chatbot = gr.Chatbot([["Hello", "Great to meet you. What would you like to know?"]])
    msg = gr.Textbox(placeholder="Type your message here...", show_label=False)
    state = gr.State()  # Stores the ChatSession object between turns
    msg.submit(respond, [msg, chatbot, state], [chatbot, state, msg])

demo.launch()
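To try the demo locally, install the two dependencies and supply a Gemini API key. The sketch below is an assumed setup step, not part of the demo itself; it reads the key from a GOOGLE_API_KEY environment variable instead of hard-coding it in the script:

# Assumed setup, not shown in the original demo:
#   pip install gradio google-generativeai
import os
import google.generativeai as genai

# Read the key from an environment variable rather than embedding it in source (assumption)
genai.configure(api_key=os.environ["GOOGLE_API_KEY"])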