# chat-bot / app.py
import streamlit as st
from transformers import AutoModelForCausalLM, AutoTokenizer


# Load the DialoGPT model and tokenizer once and cache them across Streamlit reruns
@st.cache_resource
def load_model():
    model_name = "microsoft/DialoGPT-medium"
    tokenizer = AutoTokenizer.from_pretrained(model_name)
    model = AutoModelForCausalLM.from_pretrained(model_name)
    return model, tokenizer
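
# Note: microsoft/DialoGPT-small and microsoft/DialoGPT-large should work as
# drop-in replacements for the medium checkpoint above, trading response quality
# against download size and memory use.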


# Generate a single-turn response from DialoGPT
def generate_response(input_text, model, tokenizer):
    # Append the end-of-sequence token so the model treats the user turn as complete
    inputs = tokenizer.encode(input_text + tokenizer.eos_token, return_tensors="pt")
    # max_length counts the prompt tokens as well as the newly generated ones
    outputs = model.generate(inputs, max_length=150, pad_token_id=tokenizer.eos_token_id, do_sample=True, top_p=0.9, top_k=50)
    # Decode only the tokens generated after the prompt
    response = tokenizer.decode(outputs[:, inputs.shape[-1]:][0], skip_special_tokens=True)
    return response
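

# A minimal sketch (not wired into the UI below) of how the same model could carry
# multi-turn context, following the token-concatenation pattern from the DialoGPT
# model card. The function name, the history_ids argument, and the max_length value
# are illustrative choices, not part of the original app, which generates each
# reply independently of earlier turns.
def generate_response_with_history(input_text, model, tokenizer, history_ids=None):
    import torch  # already installed as a transformers dependency

    # Encode the new user turn, terminated by the end-of-sequence token
    new_input_ids = tokenizer.encode(input_text + tokenizer.eos_token, return_tensors="pt")
    # Prepend the token ids of the previous turns, if any
    bot_input_ids = torch.cat([history_ids, new_input_ids], dim=-1) if history_ids is not None else new_input_ids
    # The generated tensor contains the whole conversation, prompt included
    history_ids = model.generate(bot_input_ids, max_length=500, pad_token_id=tokenizer.eos_token_id, do_sample=True, top_p=0.9, top_k=50)
    # Decode only the newly generated reply and return the updated history
    response = tokenizer.decode(history_ids[:, bot_input_ids.shape[-1]:][0], skip_special_tokens=True)
    return response, history_ids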


# Streamlit UI setup
def main():
    st.title("DialoGPT Chatbot")

    # Keep the chat history in session state so it survives reruns
    if 'history' not in st.session_state:
        st.session_state['history'] = []

    user_input = st.text_input("You:", "")

    # Generate and store a response when the user submits a message
    if user_input:
        model, tokenizer = load_model()
        response = generate_response(user_input, model, tokenizer)
        st.session_state['history'].append({"user": user_input, "bot": response})

    # Display the chat history
    for chat in st.session_state['history']:
        st.write(f"You: {chat['user']}")
        st.write(f"Bot: {chat['bot']}")


if __name__ == "__main__":
    main()
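
# To run locally (with streamlit, transformers, and torch installed):
#   streamlit run app.py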