import streamlit as st
from transformers import AutoTokenizer, AutoModelForCausalLM

# Load the model and tokenizer once at startup
model_name = 'openai-community/gpt2-large'
model = AutoModelForCausalLM.from_pretrained(model_name)
tokenizer = AutoTokenizer.from_pretrained(model_name, use_fast=True)

def generate_blogpost(topic):
    # Encode the topic, generate up to 500 tokens, and decode the result
    inputs = tokenizer.encode(topic, return_tensors='pt')
    outputs = model.generate(
        inputs,
        max_length=500,
        num_return_sequences=1,
        pad_token_id=tokenizer.eos_token_id,  # GPT-2 has no pad token; reuse EOS to avoid a warning
    )
    text = tokenizer.decode(outputs[0], skip_special_tokens=True)
    return text

# Streamlit app
st.title('Blog Post Generator')
topic = st.text_input('Enter a topic:')
if topic:
    blogpost = generate_blogpost(topic)
    st.write(blogpost)
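
# Note: by default generate() uses greedy decoding, which tends to repeat itself on
# long outputs. A minimal sketch of enabling sampling instead (the parameter values
# below are illustrative assumptions, not part of the original app):
#
#     outputs = model.generate(
#         inputs,
#         max_length=500,
#         do_sample=True,     # sample from the distribution instead of greedy decoding
#         top_p=0.95,         # nucleus sampling: keep tokens covering 95% of the probability mass
#         temperature=0.8,    # slightly soften the distribution
#         pad_token_id=tokenizer.eos_token_id,
#     )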