Update app.py
app.py CHANGED
@@ -4,12 +4,13 @@ from transformers import pipeline
 # Configure the Hugging Face API key
 HF_API_KEY = st.secrets['huggingface_api_key']
 
-# Initialize the Hugging Face model
-generator = pipeline('text-generation', model='
+# Initialize the Hugging Face model with the API key
+generator = pipeline('text-generation', model='gpt2', tokenizer='gpt2', use_auth_token=HF_API_KEY)
 
 # Function to get response from the Hugging Face model
 def get_chatbot_response(user_input):
     try:
+        # Generate the response using the Hugging Face model
         response = generator(user_input, max_length=100, num_return_sequences=1)
         return response[0]['generated_text']
     except Exception as e:
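Note on the new generator setup: `use_auth_token=` still works but is deprecated in recent transformers releases in favor of `token=`, and gpt2 is a public checkpoint, so the token is only strictly needed for gated or private models. A minimal standalone sketch of the same initialization follows; the secret key name and the gpt2 model come from this diff, while the `token=` spelling and the placeholder string are assumptions about the installed transformers version:

# Minimal sketch of the updated setup, assuming a transformers release where
# `token=` replaces the deprecated `use_auth_token=`.
from transformers import pipeline

HF_API_KEY = "hf_your_token_here"  # in the app this comes from st.secrets['huggingface_api_key']

# gpt2 is public, so the token is optional here; gated or private models require it.
generator = pipeline("text-generation", model="gpt2", tokenizer="gpt2", token=HF_API_KEY)

result = generator("Hello there!", max_length=100, num_return_sequences=1)
print(result[0]["generated_text"])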
@@ -47,7 +48,6 @@ st.markdown("""
     border-radius: 10px 10px 10px 10px;
 }
 .chat-header {
-    # text-align: center;
     font-size: 35px;
     font-weight: bold;
     margin-bottom: 20px;
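Side note on this hunk: `#` does not start a comment in CSS, so the removed `# text-align: center;` line was an invalid declaration inside the embedded stylesheet; dropping it (or rewriting it as `/* text-align: center; */`) keeps the `.chat-header` rule valid.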
@@ -60,23 +60,29 @@ st.markdown("""
 </style>
 """, unsafe_allow_html=True)
 
-
+# Chat header and intro
+st.markdown('<div class="chat-header">AI Chatbot - Your Companion 💻</div>', unsafe_allow_html=True)
 st.write("Powered by Hugging Face AI for smart, engaging conversations. 🤖")
 
+# Initialize session state for conversation history if not already initialized
 if "history" not in st.session_state:
     st.session_state["history"] = []
 
+# Create the chat form
 with st.form(key="chat_form", clear_on_submit=True):
     user_input = st.text_input("Your message here... ✍️", max_chars=2000, label_visibility="collapsed")
     submit_button = st.form_submit_button("Send 🚀")
 
 if submit_button:
     if user_input:
+        # Get response from the chatbot
         response = get_chatbot_response(user_input)
+        # Store user input and bot response in session state history
         st.session_state.history.append((user_input, response))
     else:
         st.warning("Please Enter A Prompt 😅")
 
+# Display chat history
 if st.session_state["history"]:
     st.markdown('<div class="chat-container">', unsafe_allow_html=True)
     for user_input, response in st.session_state["history"]:
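The final hunk cuts off inside the history loop, so the per-message rendering is not shown in this commit. For orientation only, a typical body for a loop like this might look as follows; the `user-message` and `bot-message` class names are hypothetical and not taken from the diff:

# Hypothetical continuation (not part of this commit): render each exchange
# inside the chat container and then close the wrapping div.
for user_input, response in st.session_state["history"]:
    st.markdown(f'<div class="user-message">{user_input}</div>', unsafe_allow_html=True)
    st.markdown(f'<div class="bot-message">{response}</div>', unsafe_allow_html=True)
st.markdown('</div>', unsafe_allow_html=True)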