Space status: Runtime error

Commit 0df9787 ("Update app.py")
1 parent: 72e65b5

app.py CHANGED:
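In outline, the commit strips the old UI scaffolding (the `st.set_page_config` call, an HTML/CSS logo header, and the even/odd turn counting used to pick the next role) in favor of a plain `st.title`, adds multi-image upload routed to `gemini-pro-vision`, and saves the chat history to SQLite after each reply. Notes on the result follow the diff.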
@@ -1,10 +1,9 @@
 import streamlit as st
-import google.generativeai as genai
+import google.generativeai as genai
 import sqlite3
-from PIL import Image

 # Database setup
-conn = sqlite3.connect('chat_history.db')
+conn = sqlite3.connect('chat_history.db')
 c = conn.cursor()

 c.execute('''
@@ -12,118 +11,63 @@ c.execute('''
     (role TEXT, message TEXT)
 ''')

-#
-api_key = "AIzaSyC70u1sN87IkoxOoIj4XCAPw97ae2LZwNM"
-genai.configure(api_key=api_key)
+# API setup
+api_key = "AIzaSyC70u1sN87IkoxOoIj4XCAPw97ae2LZwNM"
+genai.configure(api_key=api_key)

 generation_config = {
     "temperature": 0.9,
-    "max_output_tokens": 3000
+    "max_output_tokens": 3000
 }

-
+# Streamlit UI
+st.title("Chatbot")

-# Initialize session state
 if "chat_history" not in st.session_state:
     st.session_state["chat_history"] = []

-
-st.session_state["user_input"] = ""
-
-# Streamlit UI
-st.set_page_config(page_title="Chatbot", page_icon="🤖")
+chat_history = st.session_state["chat_history"]

-
-st.markdown("""
-<style>
-.container {
-    display: flex;
-}
-.logo-text {
-    font-weight:700 !important;
-    font-size:50px !important;
-    color: #f9a01b !important;
-    padding-top: 75px !important;
-}
-.logo-img {
-    float:right;
-}
-</style>
-<div class="container">
-    <p class="logo-text">Chatbot</p>
-    <img class="logo-img" src="https://example.com/bot-logo.png" width=120 height=120>
-</div>
-""", unsafe_allow_html=True)
-
-# Sidebar UI elements
-
-# Display chat history
-for message in st.session_state["chat_history"]:
+for message in chat_history:
     r, t = message["role"], message["parts"][0]["text"]
-    st.markdown(f"**{r.title()}:** {t}")
+    st.markdown(f"**{r.title()}:** {t}")

-
-if len(st.session_state["chat_history"]) % 2 == 0:
-    role = "user"
-else:
-    role = "model"
-
-# User input
-user_input = st.text_area("", value=st.session_state.user_input, height=5, key="user_input")
+user_input = st.text_input("You: ")

-#
+# Image upload
+uploaded_files = st.file_uploader("Upload Images", type=["png","jpg","jpeg"], accept_multiple_files=True)
+image_parts = []

-
+if uploaded_files:
+    for file in uploaded_files:
+        image_parts.append({"mime_type": file.type, "data": file.read()})
+
 if user_input:
-    try:
-        # Add user input
-        st.session_state["chat_history"].append({"role": role, "parts": [{"text": user_input}]})
-
-        # Model code
-        if role == "user":
-
-            model = genai.GenerativeModel('gemini-pro')
-            response = model.generate_content(
-                contents=[user_input],
-                generation_config=generation_config
-            )
-
-            # Add model response
-            st.session_state["chat_history"].append({"role": "model", "parts": [{"text": response}]})
-
-    except Exception as e:
-        st.error(f"An error occurred: {e}")
-
-# Display chat history
-for message in st.session_state["chat_history"]:
-    r, t = message["role"], message["parts"][0]["text"]
-    st.markdown(f"**{r.title()}:** {t}")
-
-# Save chat history to database
-for message in st.session_state["chat_history"]:
-    c.execute("INSERT INTO history VALUES (?, ?)",
-              (message["role"], message["parts"][0]["text"]))

-
-
-
-
-
-
-
-
-
-
-
-
-
-
-    color: white;
-    text-align: center;
-}
-</style>
+    # Append user input
+    chat_history.append({"role": "user", "parts": [{"text": user_input}]})
+
+    if len(image_parts) > 0:
+        prompt_parts = [user_input] + image_parts
+
+        model = genai.GenerativeModel('gemini-pro-vision')
+        response = model.generate_content(prompt_parts, config=generation_config)
+
+    else:
+        model = genai.GenerativeModel('gemini-pro')
+        response = model.generate_content(chat_history, config=generation_config)
+
+    response_text = response['text']

-
-
-
-""
+    # Append model response
+    chat_history.append({"role": "assistant", "parts": [{"text": response_text}]})
+
+    st.session_state["chat_history"] = chat_history
+
+    # Save chat history to database
+    for message in chat_history:
+        c.execute("INSERT INTO history VALUES (?, ?)",
+                  (message["role"], message["parts"][0]["text"]))
+
+    conn.commit()
+    conn.close()
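The Space's "Runtime error" badge is consistent with three bugs the new code introduces, each of which would raise on the first or second message: `GenerativeModel.generate_content` in `google.generativeai` takes a `generation_config=` keyword, not `config=`; the reply text is the `response.text` property, not `response['text']`; and history entries tagged `"assistant"` are rejected, since the Gemini API only knows the roles `"user"` and `"model"`. A minimal sketch of the generation block with those three fixes applied, reusing the committed code's variables and untested against this Space:

if user_input:
    chat_history.append({"role": "user", "parts": [{"text": user_input}]})

    if image_parts:
        # gemini-pro-vision takes a flat list of text and image parts
        model = genai.GenerativeModel('gemini-pro-vision')
        response = model.generate_content([user_input] + image_parts,
                                          generation_config=generation_config)
    else:
        model = genai.GenerativeModel('gemini-pro')
        response = model.generate_content(chat_history,
                                          generation_config=generation_config)

    # .text is a property on the response object, not a dict key
    response_text = response.text

    # the API accepts only the roles "user" and "model"
    chat_history.append({"role": "model", "parts": [{"text": response_text}]})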
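Two quieter issues survive the commit. First, the persistence loop re-inserts the entire `chat_history` on every turn, so each message lands in the `history` table once per subsequent turn. Writing only the exchange appended this turn avoids the duplicates; a sketch, assuming the same `history(role, message)` table:

    # persist only the two messages appended this turn
    for message in chat_history[-2:]:
        c.execute("INSERT INTO history VALUES (?, ?)",
                  (message["role"], message["parts"][0]["text"]))
    conn.commit()
    conn.close()

Second, the Gemini API key is committed in plain text, readable by anyone browsing the repo. On Spaces it would normally live in a repository secret, which is exposed to the app as an environment variable; `GOOGLE_API_KEY` below is an assumed secret name:

    import os
    genai.configure(api_key=os.environ["GOOGLE_API_KEY"])  # GOOGLE_API_KEY is an assumed secret name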