import os

import streamlit as st
from transformers import pipeline

# Read the Hugging Face API token from Streamlit secrets and expose it to
# huggingface_hub/transformers via the environment (only needed for gated or private models).
HUGGINGFACE_API_KEY = st.secrets["huggingface_api_key"]
os.environ["HF_TOKEN"] = HUGGINGFACE_API_KEY

# Initialize the text-generation pipeline (the token, if any, is picked up from the environment)
chatbot = pipeline("text-generation", model="microsoft/DialoGPT-medium")
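# Optional refinement (not in the original code): caching the pipeline with st.cache_resource
# would avoid reloading the model on every Streamlit rerun. A minimal sketch:
#
#   @st.cache_resource
#   def load_chatbot():
#       return pipeline("text-generation", model="microsoft/DialoGPT-medium")
#
#   chatbot = load_chatbot()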
# Initialize the conversation history
if "history" not in st.session_state:
st.session_state["history"] = []
# Function to get response from the model
def get_chatbot_response(user_input):
    try:
        # Rebuild the prompt from the last 5 exchanges so the model sees recent context.
        # The loop variables must not shadow the current user_input parameter.
        conversation_history = ""
        for past_input, past_response in st.session_state["history"][-5:]:
            conversation_history += f"User: {past_input}\nBot: {past_response}\n"

        # Append the current user input to the prompt
        conversation_history += f"User: {user_input}\n"

        # Debug: print the prompt sent to the model
        print("Conversation History (Trimmed if Necessary):")
        print(conversation_history)

        # Generate a continuation. DialoGPT has a 1024-token context window, and
        # max_length counts prompt plus generated tokens, so keep the prompt short.
        response = chatbot(
            conversation_history,
            max_length=1000,
            pad_token_id=50256,
            num_return_sequences=1,
        )[0]["generated_text"]

        # Debug: print the raw model output (prompt + continuation)
        print("Generated Response (Before Stripping User Input):")
        print(response)

        # Drop the echoed prompt so only the newly generated text remains
        response = response[len(conversation_history):].strip()

        # Debug: print the final response
        print("Final Response (After Stripping User Input):")
        print(response)

        return response
    except Exception as e:
        return f"Error: {str(e)}"
# Streamlit interface setup
st.set_page_config(page_title="Smart ChatBot", layout="centered")
# Custom CSS for chat bubbles with full width and emojis
st.markdown("""
<style>
.chat-container {
display: flex;
flex-direction: column;
width: 100%;
}
.chat-bubble {
width: 100%;
padding: 15px;
margin: 10px 0;
border-radius: 10px;
font-size: 18px;
color: white;
display: inline-block;
line-height: 1.5;
}
.user-bubble {
background: #6a82fb; /* Soft blue */
align-self: flex-end;
border-radius: 10px 10px 10px 10px;
}
.bot-bubble {
background: #fc5c7d; /* Soft pink */
align-self: flex-start;
border-radius: 10px 10px 10px 10px;
}
.chat-header {
text-align: center;
font-size: 35px;
font-weight: bold;
margin-bottom: 20px;
color: #3d3d3d;
}
.emoji {
font-size: 22px;
margin-right: 10px;
}
</style>
""", unsafe_allow_html=True)
st.markdown('<div class="chat-header">Gemini Chatbot-Your AI Companion π»</div>', unsafe_allow_html=True)
st.write("Powered by Hugging Faceβs DialoGPT model for smart, engaging conversations. π€")
with st.form(key="chat_form", clear_on_submit=True):
user_input = st.text_input("Your message here... βοΈ", max_chars=2000, label_visibility="collapsed")
submit_button = st.form_submit_button("Send π")
if submit_button:
if user_input:
response = get_chatbot_response(user_input)
if response:
st.session_state.history.append((user_input, response))
else:
st.warning("Bot returned an empty response.")
else:
st.warning("Please Enter A Prompt π
")
if st.session_state["history"]:
st.markdown('<div class="chat-container">', unsafe_allow_html=True)
for user_input, response in st.session_state["history"]:
st.markdown(f'<div class="chat-bubble user-bubble"><span class="emoji">π€</span>You: {user_input}</div>', unsafe_allow_html=True)
st.markdown(f'<div class="chat-bubble bot-bubble"><span class="emoji">π€</span>Bot: {response}</div>', unsafe_allow_html=True)
st.markdown('</div>', unsafe_allow_html=True)