Spaces:
Sleeping
Sleeping
Update app.py
Browse files
app.py
CHANGED
@@ -1,52 +1,21 @@
|
|
1 |
-
import os
|
2 |
import streamlit as st
|
3 |
from transformers import pipeline
|
4 |
|
5 |
-
#
|
6 |
-
|
7 |
-
os.environ["HF_HOME"] = HUGGINGFACE_API_KEY # Set the Hugging Face API key
|
8 |
|
9 |
-
# Initialize the
|
10 |
-
|
11 |
|
12 |
-
#
|
13 |
-
if "history" not in st.session_state:
|
14 |
-
st.session_state["history"] = []
|
15 |
-
|
16 |
-
# Function to get response from the model
|
17 |
def get_chatbot_response(user_input):
|
18 |
try:
|
19 |
-
|
20 |
-
|
21 |
-
for user_input, response in st.session_state["history"][-5:]: # Limit history to last 5 exchanges
|
22 |
-
conversation_history += f"User: {user_input}\nBot: {response}\n"
|
23 |
-
|
24 |
-
# Add the current user input to the conversation
|
25 |
-
conversation_history += f"User: {user_input}\n"
|
26 |
-
|
27 |
-
# Debug: Print the conversation history
|
28 |
-
print("Conversation History (Trimmed if Necessary):")
|
29 |
-
print(conversation_history)
|
30 |
-
|
31 |
-
# Generate response from the model
|
32 |
-
response = chatbot(conversation_history, max_length=1000, pad_token_id=50256, num_return_sequences=1)[0]["generated_text"]
|
33 |
-
|
34 |
-
# Debug: Print the full response generated by the model
|
35 |
-
print("Generated Response (Before Stripping User Input):")
|
36 |
-
print(response)
|
37 |
-
|
38 |
-
# Remove the user input from the generated response (optional)
|
39 |
-
response = response[len(conversation_history):].strip()
|
40 |
-
|
41 |
-
# Debug: Print the final response
|
42 |
-
print("Final Response (After Stripping User Input):")
|
43 |
-
print(response)
|
44 |
-
|
45 |
-
return response
|
46 |
except Exception as e:
|
47 |
return f"Error: {str(e)}"
|
48 |
|
49 |
-
# Streamlit interface
|
50 |
st.set_page_config(page_title="Smart ChatBot", layout="centered")
|
51 |
|
52 |
# Custom CSS for chat bubbles with full width and emojis
|
@@ -78,7 +47,7 @@ st.markdown("""
|
|
78 |
border-radius: 10px 10px 10px 10px;
|
79 |
}
|
80 |
.chat-header {
|
81 |
-
text-align: center;
|
82 |
font-size: 35px;
|
83 |
font-weight: bold;
|
84 |
margin-bottom: 20px;
|
@@ -91,8 +60,11 @@ st.markdown("""
|
|
91 |
</style>
|
92 |
""", unsafe_allow_html=True)
|
93 |
|
94 |
-
st.markdown('<div class="chat-header">
|
95 |
-
st.write("Powered by Hugging Face
|
|
|
|
|
|
|
96 |
|
97 |
with st.form(key="chat_form", clear_on_submit=True):
|
98 |
user_input = st.text_input("Your message here... βοΈ", max_chars=2000, label_visibility="collapsed")
|
@@ -101,10 +73,7 @@ with st.form(key="chat_form", clear_on_submit=True):
|
|
101 |
if submit_button:
|
102 |
if user_input:
|
103 |
response = get_chatbot_response(user_input)
|
104 |
-
|
105 |
-
st.session_state.history.append((user_input, response))
|
106 |
-
else:
|
107 |
-
st.warning("Bot returned an empty response.")
|
108 |
else:
|
109 |
st.warning("Please Enter A Prompt π
")
|
110 |
|
|
|
|
|
1 |
import streamlit as st
|
2 |
from transformers import pipeline
|
3 |
|
4 |
+
# Configure the Hugging Face API key (read from Streamlit secrets — never hard-coded)
HF_API_KEY = st.secrets['huggingface_api_key']

# Initialize the Hugging Face text-generation model.
# Fixes vs. previous revision:
#  - the Hub model id is "gpt2" (there is no model named "gpt-2", so loading failed)
#  - transformers' pipeline() takes the auth token via `token=`; it has no
#    `api_key=` keyword, which raised a TypeError / was silently misrouted.
generator = pipeline('text-generation', model='gpt2', tokenizer='gpt2', token=HF_API_KEY)
|
9 |
|
10 |
+
# Function to get response from the Hugging Face model
def get_chatbot_response(user_input):
    """Return the model's generated continuation of *user_input*.

    Any failure (model load, generation, unexpected output shape) is
    converted into an "Error: ..." string so the Streamlit UI always
    has something to display instead of crashing.
    """
    try:
        # Keep indexing inside the try: a malformed pipeline result is
        # reported the same way as a generation failure.
        outputs = generator(user_input, max_length=100, num_return_sequences=1)
        first_candidate = outputs[0]
        return first_candidate['generated_text']
    except Exception as e:
        return f"Error: {str(e)}"
|
17 |
|
18 |
+
# Streamlit interface
|
19 |
st.set_page_config(page_title="Smart ChatBot", layout="centered")
|
20 |
|
21 |
# Custom CSS for chat bubbles with full width and emojis
|
|
|
47 |
border-radius: 10px 10px 10px 10px;
|
48 |
}
|
49 |
.chat-header {
|
50 |
+
# text-align: center;
|
51 |
font-size: 35px;
|
52 |
font-weight: bold;
|
53 |
margin-bottom: 20px;
|
|
|
60 |
</style>
|
61 |
""", unsafe_allow_html=True)
|
62 |
|
63 |
+
# Page header and tagline rendered above the chat area.
st.markdown('<div class="chat-header">AI Chatbot-Your Companion π»</div>', unsafe_allow_html=True)
st.write("Powered by Hugging Face AI for smart, engaging conversations. π€")

# Create the conversation history exactly once per browser session;
# Streamlit reruns the whole script on every interaction, so guard the init.
if "history" not in st.session_state:
    st.session_state["history"] = []
|
68 |
|
69 |
with st.form(key="chat_form", clear_on_submit=True):
|
70 |
user_input = st.text_input("Your message here... βοΈ", max_chars=2000, label_visibility="collapsed")
|
|
|
73 |
# Handle one submitted chat message per script rerun.
if submit_button:
    if not user_input:
        # Empty submission — prompt the user instead of calling the model.
        st.warning("Please Enter A Prompt π")
    else:
        bot_reply = get_chatbot_response(user_input)
        st.session_state.history.append((user_input, bot_reply))
|
79 |
|