ziyadsuper2017 committed on
Commit
afb1a49
·
1 Parent(s): 052d1c2

Update app.py

Files changed (1)
  1. app.py +65 -45
app.py CHANGED
@@ -1,74 +1,94 @@
- import streamlit as st
- import google.generativeai as genai
- import sqlite3
 
  # Database setup
- conn = sqlite3.connect('chat_history.db')
- c = conn.cursor()
 
  c.execute('''
  CREATE TABLE IF NOT EXISTS history
  (role TEXT, message TEXT)
- ''')
 
  # Generative AI setup
- api_key = "AIzaSyC70u1sN87IkoxOoIj4XCAPw97ae2LZwNM"
- genai.configure(api_key=api_key)
 
  generation_config = {
-     "temperature": 0.9,
-     "max_output_tokens": 500
  }
 
- safety_settings = []
-
- model = genai.GenerativeModel(
-     model_name="gemini-pro",
-     generation_config=generation_config,
-     safety_settings=safety_settings
- )
 
  # Streamlit UI
- st.title("Chatbot")
 
- chat_history = st.session_state.get("chat_history", [])
 
- if len(chat_history) % 2 == 0:
-     role = "user"
  else:
-     role = "model"
 
- for message in chat_history:
-     r, t = message["role"], message["parts"][0]["text"]
-     st.markdown(f"**{r.title()}:** {t}")
 
- user_input = st.text_input("")
 
- if user_input:
-     chat_history.append({"role": role, "parts": [{"text": user_input}]})
 
-     if role == "user":
-         response = model.generate_content(chat_history)
-         response_text = response.text
-         chat_history.append({"role": "model", "parts": [{"text": response_text}]})
 
- st.session_state["chat_history"] = chat_history
 
- for message in chat_history:
-     r, t = message["role"], message["parts"][0]["text"]
-     st.markdown(f"**{r.title()}:** {t}")
 
- if st.button("Display History"):
-     c.execute("SELECT * FROM history")
-     rows = c.fetchall()
 
-     for row in rows:
-         st.markdown(f"**{row[0].title()}:** {row[1]}")
 
  # Save chat history to database
- for message in chat_history:
      c.execute("INSERT INTO history VALUES (?, ?)",
-               (message["role"], message["parts"][0]["text"]))
- conn.commit()
 
- conn.close()
+ import streamlit as st  # Streamlit for the web UI
+ import google.generativeai as genai  # Google Generative AI client for the Gemini models
+ import sqlite3  # sqlite3 for the chat-history database
 
  # Database setup
+ conn = sqlite3.connect('chat_history.db')  # Connect to (or create) the chat_history.db file
+ c = conn.cursor()  # Cursor for executing SQL commands
 
  c.execute('''
  CREATE TABLE IF NOT EXISTS history
  (role TEXT, message TEXT)
+ ''')  # Create the history table with two columns: role and message
 
  # Generative AI setup
+ api_key = "AIzaSyC70u1sN87IkoxOoIj4XCAPw97ae2LZwNM"  # API key for the generative AI service
+ genai.configure(api_key=api_key)  # Configure the genai module with the API key
 
  generation_config = {
+     "temperature": 0.9,  # Controls the randomness of the generated text
+     "max_output_tokens": 500  # Caps the number of tokens in the generated text
  }
 
+ safety_settings = []  # Safety settings for filtering harmful or inappropriate content
 
  # Streamlit UI
+ st.title("Chatbot")  # Page title
 
+ chat_history = st.session_state.get("chat_history", [])  # Chat history from session state, or an empty list
 
+ if len(chat_history) % 2 == 0:  # An even number of messages means the user speaks next
+     role = "user"
  else:
+     role = "model"
 
+ for message in chat_history:  # Render the existing conversation
+     r, t = message["role"], message["parts"][0]["text"]
+     st.markdown(f"**{r.title()}:** {t}")
 
+ user_input = st.text_input("")  # Text input widget for the user
+ 
+ # File uploader for images
+ uploaded_file = st.file_uploader("Upload an image (optional)", type=["png", "jpg", "jpeg"])  # Image upload widget
 
+ if user_input:  # The user has entered some text
+     chat_history.append({"role": role, "parts": [{"text": user_input}]})  # Append the user input to the chat history
 
+     if role == "user":
+         # Check if an image is uploaded
+         image_parts = []
+         if uploaded_file:
+             image_parts.append({
+                 "mime_type": uploaded_file.type,  # MIME type of the uploaded file
+                 "data": uploaded_file.read()  # Raw bytes of the uploaded file
+             })
+ 
+         # Choose the model name based on the image parts
+         if image_parts:
+             model_name = "gemini-pro-vision"  # Multimodal model for text plus images
+         else:
+             model_name = "gemini-pro"  # Text-only model
+ 
+         # Create the generative model object
+         model = genai.GenerativeModel(
+             model_name=model_name,
+             generation_config=generation_config,
+             safety_settings=safety_settings
+         )
+ 
+         # Generate a response based on the text and any uploaded image
+         if image_parts:
+             chat_history[-1]["parts"].extend(image_parts)  # Attach the image parts to the latest user message
+         response = model.generate_content(chat_history)
+         response_text = response.text
+         chat_history.append({"role": "model", "parts": [{"text": response_text}]})  # Append the model reply to the chat history
 
+ st.session_state["chat_history"] = chat_history  # Persist the chat history in session state
 
+ for message in chat_history:  # Render the updated conversation
+     r, t = message["role"], message["parts"][0]["text"]
+     st.markdown(f"**{r.title()}:** {t}")
 
+ if st.button("Display History"):  # Button for showing the saved history
+     c.execute("SELECT * FROM history")  # Select all rows from the history table
+     rows = c.fetchall()
 
+     for row in rows:
+         st.markdown(f"**{row[0].title()}:** {row[1]}")  # Render each saved role/message pair
 
  # Save chat history to database
+ for message in chat_history:
      c.execute("INSERT INTO history VALUES (?, ?)",
+               (message["role"], message["parts"][0]["text"]))  # Insert each role/message pair into the history table
+ conn.commit()  # Commit the changes to the database
 
+ conn.close()  # Close the database connection
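
For reference, a minimal, self-contained sketch of the multimodal request pattern this commit relies on: one text part plus one inline image part sent to gemini-pro-vision through google.generativeai. The standalone-script framing, the YOUR_API_KEY placeholder, and the sample_image.png path are illustrative assumptions, not part of app.py.

import google.generativeai as genai  # same client library app.py uses

genai.configure(api_key="YOUR_API_KEY")  # placeholder; app.py configures its own key

# One user turn whose parts mix plain text with an inline image blob,
# the same part shape app.py builds in image_parts.
with open("sample_image.png", "rb") as f:  # hypothetical local image, for illustration only
    image_bytes = f.read()

contents = [{
    "role": "user",
    "parts": [
        {"text": "Describe this image."},
        {"mime_type": "image/png", "data": image_bytes},
    ],
}]

model = genai.GenerativeModel(model_name="gemini-pro-vision")  # model app.py picks when an image is attached
response = model.generate_content(contents)
print(response.text)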