import os

import google.generativeai as genai
import streamlit as st

# Configure the Gemini client; read the API key from an environment variable
# instead of hardcoding it so the secret stays out of source control.
genai.configure(api_key=os.environ["GOOGLE_API_KEY"])

# Create chatbot interface
st.title("Gemini API Chatbot")

# Get chat history from session state
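# Streamlit reruns this script from top to bottom on every interaction, so the
# conversation is kept in st.session_state, the per-session store that
# survives those reruns.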
chat_history = st.session_state.get("chat_history", [])

# Get user input from text box
user_input = st.text_input("You")

# Check if user input is not empty
if user_input:
    # Add user message to chat history
    chat_history.append(user_input)

    # Display user message with markdown
    st.markdown(f"**You:** {user_input}")

    # Create model object
    model = genai.GenerativeModel(model_name="gemini-pro")

    # Get model response with generate_content method
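    # Note: generate_content() also accepts a list of strings, so the flat
    # chat_history is sent as one combined prompt without user/model role
    # labels; a role-aware alternative would be model.start_chat().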
    with st.spinner("Thinking..."):
        response = model.generate_content(chat_history)

    # Get response text from response object
    response_text = response.text

    # Add response message to chat history
    chat_history.append(response_text)

    # Display response message with markdown
    st.markdown(f"**Gemini Bot:** {response_text}")

    # Update session state with chat history
    st.session_state["chat_history"] = chat_history
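
# To try this sketch locally (assuming the file is saved as app.py and the
# streamlit and google-generativeai packages are installed), set GOOGLE_API_KEY
# in your shell and launch the app with:  streamlit run app.py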