# Streamlit chatbot app — Hugging Face DialoGPT-medium text-generation demo.
# (Header reconstructed: the original lines were Hugging Face Spaces page
# residue — "Spaces:" / "Sleeping" — left over from web extraction.)
import os

import streamlit as st
from transformers import pipeline

# Read the Hugging Face token from Streamlit secrets and expose it through the
# environment variable the Hub client actually reads for authentication.
# BUG FIX: the original wrote the token into HF_HOME, which is the *cache
# directory path*, not an auth variable — any non-path value there breaks the
# local model cache. HF_TOKEN is the documented auth variable.
HUGGINGFACE_API_KEY = st.secrets["huggingface_api_key"]
os.environ["HF_TOKEN"] = HUGGINGFACE_API_KEY

# Text-generation pipeline; the token (if required) is picked up from HF_TOKEN.
# Note: this runs on every Streamlit rerun — consider wrapping in
# @st.cache_resource to avoid reloading the model per interaction.
chatbot = pipeline("text-generation", model="microsoft/DialoGPT-medium")

# Conversation history: a list of (user_message, bot_response) tuples kept in
# session state so it survives Streamlit's top-to-bottom reruns.
if "history" not in st.session_state:
    st.session_state["history"] = []
# Function to get response from the model | |
def get_chatbot_response(user_input):
    """Generate a bot reply to *user_input*, conditioned on recent history.

    Builds a "User: .../Bot: ..." prompt from the last 5 exchanges stored in
    ``st.session_state["history"]``, runs the DialoGPT pipeline, and strips
    the echoed prompt from the generated text.

    Args:
        user_input: The current message typed by the user.

    Returns:
        The model's reply as a string, or an ``"Error: ..."`` string if
        generation fails (the app surface tolerates a string either way).
    """
    try:
        # Build the prompt from the last 5 exchanges only, to bound length.
        # BUG FIX: the original loop variable was also named `user_input`,
        # shadowing the parameter — when history was non-empty, the prompt's
        # final line repeated the *last historical* message instead of the
        # current one, so the model never saw the new input.
        conversation_history = ""
        for past_user, past_bot in st.session_state["history"][-5:]:
            conversation_history += f"User: {past_user}\nBot: {past_bot}\n"

        # Append the current user message to the prompt.
        conversation_history += f"User: {user_input}\n"

        # Debug: show the exact prompt sent to the model.
        print("Conversation History (Trimmed if Necessary):")
        print(conversation_history)

        # Generate; pad_token_id=50256 is GPT-2's EOS token, which DialoGPT
        # uses for padding to silence the pipeline warning.
        response = chatbot(
            conversation_history,
            max_length=1000,
            pad_token_id=50256,
            num_return_sequences=1,
        )[0]["generated_text"]

        # Debug: full model output, prompt included.
        print("Generated Response (Before Stripping User Input):")
        print(response)

        # The pipeline echoes the prompt; keep only the newly generated tail.
        response = response[len(conversation_history):].strip()

        # Debug: final reply after stripping the prompt echo.
        print("Final Response (After Stripping User Input):")
        print(response)
        return response
    except Exception as e:
        # Broad catch is deliberate at this app boundary: surface the error
        # as chat text rather than crashing the Streamlit script.
        return f"Error: {str(e)}"
# Streamlit interface setup | |
# Page chrome and chat-bubble styling.
# DEFECT FIXED: every line carried a trailing " | |" web-extraction artifact
# (including inside the CSS string literal); removed so the file parses and
# the injected CSS is valid. Runtime strings are otherwise unchanged.
st.set_page_config(page_title="Smart ChatBot", layout="centered")

# Custom CSS for full-width chat bubbles with emoji accents.
st.markdown("""
    <style>
        .chat-container {
            display: flex;
            flex-direction: column;
            width: 100%;
        }
        .chat-bubble {
            width: 100%;
            padding: 15px;
            margin: 10px 0;
            border-radius: 10px;
            font-size: 18px;
            color: white;
            display: inline-block;
            line-height: 1.5;
        }
        .user-bubble {
            background: #6a82fb; /* Soft blue */
            align-self: flex-end;
            border-radius: 10px 10px 10px 10px;
        }
        .bot-bubble {
            background: #fc5c7d; /* Soft pink */
            align-self: flex-start;
            border-radius: 10px 10px 10px 10px;
        }
        .chat-header {
            text-align: center;
            font-size: 35px;
            font-weight: bold;
            margin-bottom: 20px;
            color: #3d3d3d;
        }
        .emoji {
            font-size: 22px;
            margin-right: 10px;
        }
    </style>
""", unsafe_allow_html=True)

# Header and tagline. NOTE(review): the header says "Gemini Chatbot" while the
# model is DialoGPT — likely leftover branding; confirm before renaming since
# it is user-visible text.
st.markdown('<div class="chat-header">Gemini Chatbot-Your AI Companion π»</div>', unsafe_allow_html=True)
st.write("Powered by Hugging Faceβs DialoGPT model for smart, engaging conversations. π€")
# Input form; clear_on_submit empties the text box after each send.
with st.form(key="chat_form", clear_on_submit=True):
    user_input = st.text_input("Your message here... βοΈ", max_chars=2000, label_visibility="collapsed")
    submit_button = st.form_submit_button("Send π")

# Handle a submission: generate a reply and record the exchange.
if submit_button:
    if user_input:
        response = get_chatbot_response(user_input)
        if response:
            # CONSISTENCY FIX: use the same ["history"] subscript access as
            # the rest of the file (the original mixed attribute and key
            # styles for session state).
            st.session_state["history"].append((user_input, response))
        else:
            st.warning("Bot returned an empty response.")
    else:
        st.warning("Please Enter A Prompt π ")
# Render the full conversation as alternating user/bot bubbles.
if st.session_state["history"]:
    st.markdown('<div class="chat-container">', unsafe_allow_html=True)
    for past_user, past_bot in st.session_state["history"]:
        user_html = f'<div class="chat-bubble user-bubble"><span class="emoji">π€</span>You: {past_user}</div>'
        bot_html = f'<div class="chat-bubble bot-bubble"><span class="emoji">π€</span>Bot: {past_bot}</div>'
        st.markdown(user_html, unsafe_allow_html=True)
        st.markdown(bot_html, unsafe_allow_html=True)
    st.markdown('</div>', unsafe_allow_html=True)