import streamlit as st
from transformers import pipeline, AutoModelForCausalLM, AutoTokenizer

# Read the Hugging Face API token from Streamlit secrets. It is not passed to the
# pipeline below: gpt2-medium is a public model, so the token only matters if you
# switch to a private or gated model.
HF_API_KEY = st.secrets['huggingface_api_key']
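# Streamlit reads this key from .streamlit/secrets.toml (or the host's secrets
# settings). An illustrative entry with a placeholder value:
#   huggingface_api_key = "hf_xxxxxxxxxxxxxxxx"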

# Ensure you're logged in using the Hugging Face CLI if using private models
# huggingface-cli login

# Load the model, tokenizer, and text-generation pipeline once and cache them,
# so Streamlit does not reload the weights on every rerun of the script
@st.cache_resource(show_spinner=False)
def load_generator(model_name: str = 'gpt2-medium'):  # or another GPT-2 version you want to use
    model = AutoModelForCausalLM.from_pretrained(model_name)
    tokenizer = AutoTokenizer.from_pretrained(model_name)
    return pipeline('text-generation', model=model, tokenizer=tokenizer)

generator = load_generator()

# Generate a reply from the local text-generation pipeline
def get_chatbot_response(user_input):
    try:
        # return_full_text=False returns only the new continuation (not the echoed
        # prompt); max_new_tokens bounds the length of that continuation
        response = generator(user_input, max_new_tokens=100, num_return_sequences=1,
                             return_full_text=False)
        return response[0]['generated_text'].strip()
    except Exception as e:
        return f"Error: {str(e)}"

# Streamlit interface
st.set_page_config(page_title="Smart ChatBot", layout="centered")

# Custom CSS for chat bubbles with full width and emojis
st.markdown("""
    <style>
    .chat-container {
        display: flex;
        flex-direction: column;
        width: 100%;
    }
    .chat-bubble {
        width: 100%;
        padding: 15px;
        margin: 10px 0;
        border-radius: 10px;
        font-size: 18px;
        color: white;
        display: inline-block;
        line-height: 1.5;
    }
    .user-bubble {
        background: #6a82fb; /* Soft blue */
        align-self: flex-end;
        border-radius: 10px 10px 10px 10px;
    }
    .bot-bubble {
        background: #fc5c7d; /* Soft pink */
        align-self: flex-start;
        border-radius: 10px 10px 10px 10px;
    }
    .chat-header {
        font-size: 35px;
        font-weight: bold;
        margin-bottom: 20px;
        color: #3d3d3d;
    }
    .emoji {
        font-size: 22px;
        margin-right: 10px;
    }
    </style>
""", unsafe_allow_html=True)

# Chat header and intro
st.markdown('<div class="chat-header">AI Chatbot - Your Companion πŸ’»</div>', unsafe_allow_html=True)
st.write("Powered by Hugging Face AI for smart, engaging conversations. πŸ€–")

# Initialize session state for conversation history if not already initialized
if "history" not in st.session_state:
    st.session_state["history"] = []

# Create the chat form
with st.form(key="chat_form", clear_on_submit=True):
    user_input = st.text_input("Your message here... ✍️", max_chars=2000, label_visibility="collapsed")
    submit_button = st.form_submit_button("Send 🚀")

    if submit_button:
        if user_input:
            # Get response from the chatbot
            response = get_chatbot_response(user_input)
            # Store user input and bot response in session state history
            st.session_state.history.append((user_input, response))
        else:
            st.warning("Please Enter A Prompt πŸ˜…")

# Display chat history
if st.session_state["history"]:
    st.markdown('<div class="chat-container">', unsafe_allow_html=True)
    for user_input, response in st.session_state["history"]:
        st.markdown(f'<div class="chat-bubble user-bubble"><span class="emoji">πŸ‘€</span>You: {user_input}</div>', unsafe_allow_html=True)
        st.markdown(f'<div class="chat-bubble bot-bubble"><span class="emoji">πŸ€–</span>Bot: {response}</div>', unsafe_allow_html=True)
    st.markdown('</div>', unsafe_allow_html=True)
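
# To run this app locally (assuming a PyTorch backend for transformers; use
# whatever filename you saved this script under):
#   pip install streamlit transformers torch
#   streamlit run <this_script>.py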