# Scrape artifacts from the Hugging Face Spaces file page, preserved as comments
# so the file parses as Python:
# Spaces: Sleeping / Sleeping
# File size: 4,355 Bytes
# f6432f1 1b6e6dd c68c489 a61e06e c68c489 a61e06e c68c489 1b6e6dd c68c489
# (line-number ruler 1..127 removed)
# import streamlit as st
# from transformers import GPT2LMHeadModel, GPT2Tokenizer
# # Load the GPT-2 model and tokenizer
# @st.cache_resource
# def load_model():
# model_name = "gpt2"
# tokenizer = GPT2Tokenizer.from_pretrained(model_name)
# model = GPT2LMHeadModel.from_pretrained(model_name)
# return model, tokenizer
# # Function to generate a response from GPT-2
# def generate_response(input_text, model, tokenizer):
# inputs = tokenizer.encode(input_text, return_tensors="pt")
# outputs = model.generate(inputs, max_length=150, do_sample=True, top_p=0.9, top_k=50)
# response = tokenizer.decode(outputs[0], skip_special_tokens=True)
# return response
# # Streamlit UI setup
# def main():
# st.title("GPT-2 Chatbot")
# # Chat history
# if 'history' not in st.session_state:
# st.session_state['history'] = []
# user_input = st.text_input("You:", "")
# # Generate and display response
# if user_input:
# model, tokenizer = load_model()
# response = generate_response(user_input, model, tokenizer)
# st.session_state['history'].append({"user": user_input, "bot": response})
# # Display chat history
# for chat in st.session_state['history']:
# st.write(f"You: {chat['user']}")
# st.write(f"Bot: {chat['bot']}")
# if __name__ == "__main__":
# main()
import streamlit as st
from transformers import pipeline

# Hugging Face access token, read from Streamlit secrets
# (raises KeyError at startup if the secret is not configured).
HUGGINGFACE_API_KEY = st.secrets['huggingface_api_key']

# Build the conversational text-generation pipeline once at import time.
# BUG FIX: transformers.pipeline() has no `api_key` keyword — passing it
# raises a TypeError. The parameter for authenticating against the Hub
# is `token`.
chatbot = pipeline("text-generation", model="microsoft/DialoGPT-medium", token=HUGGINGFACE_API_KEY)
# Function to get response from the Hugging Face model
def get_chatbot_response(user_input):
    """Generate a chatbot reply for ``user_input`` via the module-level ``chatbot`` pipeline.

    Returns the generated reply text, or an ``"Error: ..."`` string on any
    failure so the Streamlit UI can display it instead of crashing.
    """
    try:
        # max_length counts prompt + generated tokens; pad_token_id=50256 is
        # GPT-2's EOS id, passed explicitly to silence the generation warning.
        result = chatbot(user_input, max_length=1000, pad_token_id=50256)
        text = result[0]['generated_text']
        # BUG FIX: text-generation pipelines return the prompt echoed at the
        # start of `generated_text`; strip it so the UI shows only the reply.
        if text.startswith(user_input):
            text = text[len(user_input):].lstrip()
        return text
    except Exception as e:  # broad on purpose: surface any failure in the chat UI
        return f"Error: {str(e)}"
# Streamlit interface
st.set_page_config(page_title="Smart ChatBot", layout="centered")
# Custom CSS for chat bubbles with full width and emojis
st.markdown("""
<style>
.chat-container {
display: flex;
flex-direction: column;
width: 100%;
}
.chat-bubble {
width: 100%;
padding: 15px;
margin: 10px 0;
border-radius: 10px;
font-size: 18px;
color: white;
display: inline-block;
line-height: 1.5;
}
.user-bubble {
background: #6a82fb; /* Soft blue */
align-self: flex-end;
border-radius: 10px 10px 10px 10px;
}
.bot-bubble {
background: #fc5c7d; /* Soft pink */
align-self: flex-start;
border-radius: 10px 10px 10px 10px;
}
.chat-header {
# text-align: center;
font-size: 35px;
font-weight: bold;
margin-bottom: 20px;
color: #3d3d3d;
}
.emoji {
font-size: 22px;
margin-right: 10px;
}
</style>
""", unsafe_allow_html=True)
# Page header and tagline.
st.markdown('<div class="chat-header">Hugging Face Chatbot-Your AI Companion π»</div>', unsafe_allow_html=True)
st.write("Powered by Hugging Face for smart, engaging conversations. π€")

# Conversation history: list of (user_message, bot_reply) tuples persisted
# across Streamlit reruns in session state.
if "history" not in st.session_state:
    st.session_state["history"] = []

# Input form; clear_on_submit empties the text box after each send.
with st.form(key="chat_form", clear_on_submit=True):
    user_input = st.text_input("Your message here... βοΈ", max_chars=2000, label_visibility="collapsed")
    submit_button = st.form_submit_button("Send π")

if submit_button:
    if user_input:
        response = get_chatbot_response(user_input)
        # Unified on the ["history"] subscript form used everywhere else
        # (the original mixed attribute and subscript access; same state).
        st.session_state["history"].append((user_input, response))
    else:
        # BUG FIX: the original string literal was split across two physical
        # lines (unterminated string -> SyntaxError); rejoined on one line.
        st.warning("Please Enter A Prompt π ")

# Render the whole conversation, oldest first.
if st.session_state["history"]:
    st.markdown('<div class="chat-container">', unsafe_allow_html=True)
    for user_input, response in st.session_state["history"]:
        st.markdown(f'<div class="chat-bubble user-bubble"><span class="emoji">π€</span>You: {user_input}</div>', unsafe_allow_html=True)
        st.markdown(f'<div class="chat-bubble bot-bubble"><span class="emoji">π€</span>Bot: {response}</div>', unsafe_allow_html=True)
    st.markdown('</div>', unsafe_allow_html=True)