import streamlit as st
from transformers import AutoTokenizer, AutoModelForCausalLM

# Define model and tokenizer
model_name = 'openai-community/gpt2-large'
model = AutoModelForCausalLM.from_pretrained(model_name)
tokenizer = AutoTokenizer.from_pretrained(model_name, use_fast=True)

def generate_blogpost(topic):
    # Encode the topic as the prompt and generate up to 500 tokens of text
    inputs = tokenizer.encode(topic, return_tensors='pt')
    outputs = model.generate(inputs, max_length=500, num_return_sequences=1)
    text = tokenizer.decode(outputs[0], skip_special_tokens=True)
    return text

# Streamlit app
st.title('Blog Post Generator')
topic = st.text_input('Enter a topic:')
if topic:
    blogpost = generate_blogpost(topic)
    st.write(blogpost)
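
One practical refinement, not part of the original snippet: Streamlit reruns the whole script on every interaction, so loading gpt2-large at module level re-instantiates the model on each rerun. A minimal sketch of caching the load, assuming a Streamlit version that provides st.cache_resource (1.18 or later); the load_model helper name is illustrative:

import streamlit as st
from transformers import AutoTokenizer, AutoModelForCausalLM

@st.cache_resource  # keep the loaded weights in memory across Streamlit reruns
def load_model(model_name='openai-community/gpt2-large'):
    # Illustrative helper, not part of the original app
    tokenizer = AutoTokenizer.from_pretrained(model_name, use_fast=True)
    model = AutoModelForCausalLM.from_pretrained(model_name)
    return tokenizer, model

tokenizer, model = load_model()

The rest of the app (generate_blogpost and the Streamlit widgets) would stay the same; only the model and tokenizer setup moves into the cached function.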