File size: 2,808 Bytes
c748174
c1bab7b
 
4d21d63
1d3466d
7a4fa7e
c1bab7b
 
1d3466d
 
c1bab7b
bd1d195
c748174
 
 
c1bab7b
c748174
5b0a6ee
e21ddc9
c748174
210d6f5
e21ddc9
 
bd1d195
7a4fa7e
 
bd1d195
7a4fa7e
c1bab7b
fc87a94
bd1d195
 
 
 
c748174
bd1d195
c1bab7b
bd1d195
 
c1bab7b
 
bd1d195
c1bab7b
bd1d195
 
c1bab7b
bd1d195
 
c1bab7b
bd1d195
 
 
cecacb2
bd1d195
 
 
 
 
 
 
c1bab7b
bd1d195
c1bab7b
 
 
c748174
c1bab7b
 
bd1d195
c1bab7b
 
 
 
 
2ae0133
c748174
 
 
c1bab7b
c748174
c1bab7b
 
 
d78e5c8
 
 
c1bab7b
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
import os
import sqlite3

import google.generativeai as genai
import streamlit as st
from streamlit import file_uploader

# Database setup
# Open (or create) the SQLite file in the working directory.
# NOTE(review): Streamlit re-executes this whole script on every user
# interaction, so a fresh connection is opened per rerun; the
# conn.close() near the end of the script pairs with this open.
conn = sqlite3.connect('chat_history.db')  
c = conn.cursor()

# Two-column conversation log: who spoke ("user"/"model") and the text.
c.execute('''
          CREATE TABLE IF NOT EXISTS history  
          (role TEXT, message TEXT)
          ''')

# Generative AI setup
# SECURITY: the original code hard-coded a Google API key here.  A key
# committed to source is compromised — revoke/rotate it and supply the
# replacement via the environment (or st.secrets) instead.
api_key = os.environ.get("GOOGLE_API_KEY", "")
genai.configure(api_key=api_key)

# Shared generation settings, reused by both the chat and vision models.
generation_config = {
  "temperature": 0.9,        # fairly creative sampling
  "max_output_tokens": 3000  # upper bound on reply length
}

# Empty list = keep the API's default safety thresholds.
safety_settings = []

# Streamlit UI
st.title("Chatbot")

# Keep the history list *inside* session_state so appends made later in
# the script survive Streamlit's top-to-bottom rerun on every interaction.
# The original `st.session_state.get("chat_history", [])` handed back a
# fresh throwaway list whenever the key was missing, silently dropping
# the first message.
if "chat_history" not in st.session_state:
    st.session_state["chat_history"] = []
chat_history = st.session_state["chat_history"]

# The Gemini chat format expects strictly alternating user/model turns:
# an even-length history means it is the user's turn next.
role = "user" if len(chat_history) % 2 == 0 else "model"

# Render the conversation so far, one markdown line per message.
for message in chat_history:
    st.markdown(f"**{message['role'].title()}:** {message['parts'][0]['text']}")
    
# Use text_area for multiline input
# NOTE(review): text_area keeps its value across reruns, so a non-empty
# box re-appends the same message on every interaction — st.chat_input
# (which clears on submit) would avoid that; confirm before changing UX.
user_input = st.text_area("", height=5)
if user_input:
    chat_history.append({"role": role, "parts": [{"text": user_input}]})
    # Show only the newly added message(s); the loop above already
    # rendered the older history, so re-printing everything here (as the
    # original did) duplicated the whole conversation on the page.
    st.markdown(f"**{role.title()}:** {user_input}")
    if role == "user":

        # Model code
        model = genai.GenerativeModel(
            model_name="gemini-pro",
            generation_config=generation_config,
            safety_settings=safety_settings
        )

        # Send the full alternating history so the model has context.
        response = model.generate_content(chat_history)
        response_text = response.text
        chat_history.append({"role": "model", "parts": [{"text": response_text}]})
        st.markdown(f"**Model:** {response_text}")

    # Persist regardless of which role submitted — the original saved
    # only inside the `role == "user"` branch and lost the message
    # otherwise.
    st.session_state["chat_history"] = chat_history
# On demand, dump every (role, message) row persisted in SQLite.
if st.button("Display History"):
    for stored_role, stored_text in c.execute("SELECT * FROM history"):
        st.markdown(f"**{stored_role.title()}:** {stored_text}")
        
# Save chat history to database
# NOTE(review): the *entire* in-memory history is re-inserted on every
# rerun, so rows duplicate over time — consider inserting only new
# messages or clearing the table first; confirm intended behavior.
c.executemany(
    "INSERT INTO history VALUES (?, ?)",
    [(m["role"], m["parts"][0]["text"]) for m in chat_history],
)
conn.commit()  # one commit for the batch, not one per row as before

conn.close()

# Separate section for image uploading
st.title("Image Description Generator")

# Single image upload; Streamlit returns a file-like UploadedFile or None.
uploaded_file = st.file_uploader("Upload an image here", type=["png", "jpg", "jpeg"])

# Text input for asking questions about the image
image_question = st.text_input("Ask something about the image:")

# Only run once both an image and a question have been provided.
if uploaded_file and image_question:
    # Package the raw image bytes with their MIME type in the format the
    # vision model expects for inline media.
    image_part = {
        "mime_type": uploaded_file.type,
        "data": uploaded_file.read(),
    }

    vision_model = genai.GenerativeModel(
        model_name="gemini-pro-vision",
        generation_config=generation_config,
        safety_settings=safety_settings,
    )

    # Prompt = the user's question followed by the image itself.
    answer = vision_model.generate_content([image_question, image_part])
    st.markdown(f"**Model's answer:** {answer.text}")