Spaces: Runtime error

Commit 76fcda3
Parent(s): c3cd834
Update app.py

app.py CHANGED
@@ -3,7 +3,7 @@ import google.generativeai as genai
 import streamlit as st
 
 # Configure the google-generativeai library by providing your API key
-genai.configure(api_key="
+genai.configure(api_key="YOUR_API_KEY")
 
 # Set up the model
 generation_config = {
@@ -32,20 +32,58 @@ safety_settings = [
 }
 ]
 
-model = genai.GenerativeModel(model_name="gemini-pro",
+# Initialize the chat history and the model instance using session state
+if "chat_history" not in st.session_state:
+    st.session_state.chat_history = []
+if "model" not in st.session_state:
+    st.session_state.model = genai.GenerativeModel(model_name="gemini-pro",
                               generation_config=generation_config,
                               safety_settings=safety_settings)
 
-# Create a streamlit interface with a title, a description,
+# Create a streamlit interface with a title, a description, and a chat container
 st.title("Gemini API") # the title of the interface
 st.markdown("A simple interface for Gemini API that explains code snippets in natural language.") # the description of the interface
-
-
-
-
-
-
-
-
-
-
+chat_container = st.empty() # the chat container to display chat messages
+
+# Display chat messages from history on app rerun
+for message in st.session_state.chat_history:
+    role = message["role"]
+    content = message["content"]
+    with chat_container.chat_message(role):
+        st.markdown(content)
+
+# Accept user input and attachment using chat input widget
+user_input, attachment = st.chat_input("Enter a message or a code snippet")
+
+if user_input or attachment:
+    # Add user message to chat history
+    if user_input:
+        st.session_state.chat_history.append({"role": "user", "content": user_input})
+        # Display user message in chat container
+        with chat_container.chat_message("user"):
+            st.markdown(user_input)
+    if attachment:
+        # You can process the attachment here or save it as needed
+        # For example, you can save it to a temporary folder and store the file path in the chat history
+        attachment_path = f"attachments/{attachment.name}"
+        with open(attachment_path, "wb") as f:
+            f.write(attachment.read())
+        # Add attachment message to chat history
+        st.session_state.chat_history.append({"role": "user", "content": f"Attachment: {attachment.name}"})
+        # Display attachment message in chat container
+        with chat_container.chat_message("user"):
+            st.markdown(f"Attachment: {attachment.name}")
+
+    # Display assistant response in chat container with a spinner
+    with chat_container.chat_message("assistant"):
+        with st.spinner("Generating a response..."):
+            # Start a chat session with the model
+            convo = st.session_state.model.start_chat(history=[])
+            # Send the user input to the model
+            convo.send_message(user_input)
+            # Get the response from the model
+            response = convo.last.text
+            # Display the response in the chat container
+            st.markdown(response)
+            # Add assistant message to chat history
+            st.session_state.chat_history.append({"role": "assistant", "content": response})
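
A note on the key change in the first hunk: the commit replaces a previously hardcoded key with the "YOUR_API_KEY" placeholder. On a deployed Space the key is normally kept out of the source entirely and read at runtime, since Space secrets are exposed to the app as environment variables. A minimal sketch of that pattern, assuming a secret named GOOGLE_API_KEY (the name is an assumption, not something defined by this commit):

import os

import google.generativeai as genai
import streamlit as st

# Read the key from the environment (e.g. a Space secret) instead of hardcoding it.
# GOOGLE_API_KEY is an assumed name, not one used by this repo.
api_key = os.getenv("GOOGLE_API_KEY")
if not api_key:
    st.error("GOOGLE_API_KEY is not set; add it as a Space secret or environment variable.")
    st.stop()

genai.configure(api_key=api_key)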
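
On the "Runtime error" badge shown for this Space, a plausible (unconfirmed) cause sits in the new input-handling code: st.chat_input returns a single value, the submitted string or None, so unpacking it into user_input, attachment raises a TypeError on the first run. An attachment would need its own widget such as st.file_uploader, the attachments/ folder has to exist before the upload is written, and st.empty() keeps only its most recent child, so looping chat messages through chat_container shows just the last one. A sketch of that block rewritten under those assumptions, writing messages through st.chat_message directly:

import os

import streamlit as st

if "chat_history" not in st.session_state:
    st.session_state.chat_history = []

# st.chat_input returns the submitted string (or None); there is no tuple to unpack.
user_input = st.chat_input("Enter a message or a code snippet")
# A file attachment needs a separate widget; st.file_uploader is one option.
attachment = st.file_uploader("Attach a code file (optional)")

if user_input or attachment:
    if user_input:
        st.session_state.chat_history.append({"role": "user", "content": user_input})
        with st.chat_message("user"):
            st.markdown(user_input)
    if attachment:
        # The target folder must exist before the upload can be written to disk.
        os.makedirs("attachments", exist_ok=True)
        attachment_path = os.path.join("attachments", attachment.name)
        with open(attachment_path, "wb") as f:
            f.write(attachment.read())
        st.session_state.chat_history.append({"role": "user", "content": f"Attachment: {attachment.name}"})
        with st.chat_message("user"):
            st.markdown(f"Attachment: {attachment.name}")

The generation step would also want a guard so send_message is only called when user_input is not None.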
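
One further observation on the generation block: start_chat(history=[]) opens a fresh session on every rerun, so although the UI replays st.session_state.chat_history, the model never sees earlier turns. If multi-turn behaviour is wanted, the stored messages can be converted to the google-generativeai history format (role "user" or "model", each with a "parts" list) and passed to start_chat. A rough sketch of a drop-in for the spinner body, assuming the surrounding app as committed and that the current user message was appended to chat_history last:

# Rebuild the conversation (minus the message about to be sent) in the
# google-generativeai history format, then resume the chat from it.
previous_turns = st.session_state.chat_history[:-1]
genai_history = [
    {"role": "model" if m["role"] == "assistant" else "user", "parts": [m["content"]]}
    for m in previous_turns
]
convo = st.session_state.model.start_chat(history=genai_history)
response = convo.send_message(user_input)
st.markdown(response.text)
st.session_state.chat_history.append({"role": "assistant", "content": response.text})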