# In a notebook, install the dependency first:  !pip install transformers
# (kept as a comment so this file is valid plain Python)
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

# Load the GPT-Neo 1.3B checkpoint and its tokenizer.
# NOTE(review): the original used GPT2Tokenizer/GPT2LMHeadModel, which do not
# match a GPT-Neo checkpoint; the Auto* classes resolve the correct
# architecture from the hub config. (The original comment called this
# "GPT-3" — it is EleutherAI's GPT-Neo.)
MODEL_NAME = "stuheart86/EleutherAI-gpt-neo-1.3B"
tokenizer = AutoTokenizer.from_pretrained(MODEL_NAME)
model = AutoModelForCausalLM.from_pretrained(MODEL_NAME)


def generate_story(inputs):
    """Generate a short story from a dict of prompt attributes.

    Args:
        inputs: mapping with string values under the keys "genre",
            "story_teller", "story_telling_style", and "creativity".
            Raises KeyError if any key is missing.

    Returns:
        The generated story, decoded to a single string (special tokens
        stripped).
    """
    genre = inputs["genre"]
    story_teller = inputs["story_teller"]
    story_telling_style = inputs["story_telling_style"]
    creativity = inputs["creativity"]

    # Fold the attributes into a natural-language prompt for the model.
    prompt = (
        f"Write a {genre} story told by a {story_teller} in a "
        f"{story_telling_style} style with {creativity} creativity."
    )
    encoded_prompt = tokenizer.encode(prompt, return_tensors="pt")

    # pad_token_id=eos silences the "no pad token set" warning that
    # GPT-style models emit from generate().
    generated = model.generate(
        encoded_prompt,
        max_length=200,
        num_return_sequences=1,
        pad_token_id=tokenizer.eos_token_id,
    )
    # Decode the first (only) returned sequence directly from the tensor;
    # no need to round-trip through .tolist().
    return tokenizer.decode(generated[0], skip_special_tokens=True)