File size: 1,973 Bytes
c59c85b
b1a1082
c59c85b
b1a1082
c59c85b
017d299
b1a1082
c59c85b
 
 
 
 
 
 
 
b1a1082
c59c85b
 
 
 
 
 
a54087d
c59c85b
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
b1a1082
c59c85b
b1a1082
c59c85b
 
 
 
 
 
 
b1015d8
017d299
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
import os
import google.generativeai as genai
import gradio as gr

# Wire up the Gemini SDK with the API key from the environment.
# Raises KeyError at startup if GEMINI_API_KEY is not set.
genai.configure(api_key=os.environ["GEMINI_API_KEY"])

# Decoding parameters for every generation request.
generation_config = dict(
    temperature=1,
    top_p=0.95,
    top_k=64,
    max_output_tokens=8192,
    response_mime_type="text/plain",
)

# Shared model handle used by the chat handler below.
# safety_settings can be passed here if needed; see
# https://ai.google.dev/gemini-api/docs/safety-settings
model = genai.GenerativeModel(
    model_name="gemini-1.5-flash",
    generation_config=generation_config,
)

# Function to handle chat
def chat_with_model(user_input, history):
    """Send one user turn to Gemini and return the updated conversation.

    Args:
        user_input: The text the user just typed.
        history: Prior turns in google.generativeai format, i.e. a list of
            ``{"role": "user" | "model", "parts": [text]}`` dicts.

    Returns:
        A ``(messages, history)`` tuple: ``messages`` is a list of
        ``(user_text, model_text)`` pairs for the Gradio Chatbot widget,
        and ``history`` is the updated genai-format history for gr.State.
    """
    # Seed the session with *prior* turns only; send_message supplies the
    # new user message, so it must not also appear in the history (the
    # original appended it first, duplicating the turn).
    chat_session = model.start_chat(history=history)
    response = chat_session.send_message(user_input)
    # The genai API requires role "model" (not "assistant") and a "parts"
    # list (not a "content" string) — the original format would be rejected
    # on the next start_chat call.
    history.append({"role": "user", "parts": [user_input]})
    history.append({"role": "model", "parts": [response.text]})
    # Pair up (user, model) turns for display in the Gradio Chatbot.
    messages = []
    for i in range(0, len(history), 2):
        user_msg = history[i]["parts"][0]
        model_msg = history[i + 1]["parts"][0] if i + 1 < len(history) else ""
        messages.append((user_msg, model_msg))
    return messages, history

# Create Gradio app
with gr.Blocks() as demo:
    gr.Markdown("# Chat with Gemini Model")
    chatbot = gr.Chatbot()
    # Per-session conversation history, threaded through chat_with_model.
    state = gr.State([])
    with gr.Row():
        user_input = gr.Textbox(
            show_label=False,
            placeholder="Type your message and press Enter",
        )
        send_btn = gr.Button("Send")
    # Run the chat, then clear the input box. Chaining with .then() fixes a
    # race in the original: two independent .click()/.submit() handlers run
    # with no ordering guarantee, so the clearing lambda could empty the
    # textbox before chat_with_model had read its value.
    send_btn.click(
        chat_with_model, [user_input, state], [chatbot, state]
    ).then(lambda: "", None, user_input)
    user_input.submit(
        chat_with_model, [user_input, state], [chatbot, state]
    ).then(lambda: "", None, user_input)

demo.launch()