# Legacy GPT-2 implementation (kept for reference):
#
# import streamlit as st
# from transformers import GPT2LMHeadModel, GPT2Tokenizer
#
# # Load the GPT-2 model and tokenizer
# @st.cache_resource
# def load_model():
#     model_name = "gpt2"
#     tokenizer = GPT2Tokenizer.from_pretrained(model_name)
#     model = GPT2LMHeadModel.from_pretrained(model_name)
#     return model, tokenizer
#
# # Function to generate a response from GPT-2
# def generate_response(input_text, model, tokenizer):
#     inputs = tokenizer.encode(input_text, return_tensors="pt")
#     outputs = model.generate(inputs, max_length=150, do_sample=True, top_p=0.9, top_k=50)
#     response = tokenizer.decode(outputs[0], skip_special_tokens=True)
#     return response
#
# # Streamlit UI setup
# def main():
#     st.title("GPT-2 Chatbot")
#
#     # Chat history
#     if 'history' not in st.session_state:
#         st.session_state['history'] = []
#
#     user_input = st.text_input("You:", "")
#
#     # Generate and display response
#     if user_input:
#         model, tokenizer = load_model()
#         response = generate_response(user_input, model, tokenizer)
#         st.session_state['history'].append({"user": user_input, "bot": response})
#
#     # Display chat history
#     for chat in st.session_state['history']:
#         st.write(f"You: {chat['user']}")
#         st.write(f"Bot: {chat['bot']}")
#
# if __name__ == "__main__":
#     main()

import streamlit as st
from transformers import pipeline

# Configure the Hugging Face API key (read from .streamlit/secrets.toml)
HUGGINGFACE_API_KEY = st.secrets['huggingface_api_key']

# Initialize the Hugging Face text-generation pipeline (DialoGPT or another conversational model).
# Note: pipeline() has no `api_key` argument; an access token is passed via `token=`,
# and is only required for gated/private models (DialoGPT-medium is public).
chatbot = pipeline("text-generation", model="microsoft/DialoGPT-medium", token=HUGGINGFACE_API_KEY)

# Function to get a response from the Hugging Face model
def get_chatbot_response(user_input):
    try:
        # Generate a response; generated_text contains the prompt followed by the model's continuation
        response = chatbot(user_input, max_length=1000, pad_token_id=50256)
        return response[0]['generated_text']
    except Exception as e:
        return f"Error: {str(e)}"

# Streamlit interface
st.set_page_config(page_title="Smart ChatBot", layout="centered")

# Custom CSS for chat bubbles with full width and emojis
# (style rules omitted here; the class names referenced below are assumed placeholders)
st.markdown("""
""", unsafe_allow_html=True)

st.markdown('<div class="title">Hugging Face Chatbot - Your AI Companion 💻</div>', unsafe_allow_html=True)
st.write("Powered by Hugging Face for smart, engaging conversations. 🤖")

# Chat history
if "history" not in st.session_state:
    st.session_state["history"] = []

# Input form
with st.form(key="chat_form", clear_on_submit=True):
    user_input = st.text_input("Your message here... ✍️", max_chars=2000, label_visibility="collapsed")
    submit_button = st.form_submit_button("Send 🚀")

if submit_button:
    if user_input:
        response = get_chatbot_response(user_input)
        st.session_state.history.append((user_input, response))
    else:
        st.warning("Please enter a prompt 😅")

# Display chat history
if st.session_state["history"]:
    # Chat container and bubbles (class names are assumed placeholders)
    st.markdown('<div class="chat-container">', unsafe_allow_html=True)
    for user_input, response in st.session_state["history"]:
        st.markdown(f'<div class="user-bubble">👤You: {user_input}</div>', unsafe_allow_html=True)
        st.markdown(f'<div class="bot-bubble">🤖Bot: {response}</div>', unsafe_allow_html=True)
    st.markdown('</div>', unsafe_allow_html=True)
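
# A minimal sketch of the local setup this script assumes (the file name "app.py" and the
# token value below are placeholders): st.secrets['huggingface_api_key'] is read from
# .streamlit/secrets.toml, and the app is launched with `streamlit run`.
#
#   # .streamlit/secrets.toml
#   huggingface_api_key = "hf_xxxxxxxxxxxxxxxxxxxx"
#
#   # launch the app
#   streamlit run app.py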