import gradio as gr
from transformers import pipeline

# Load the text-generation pipeline with the Stable Diffusion prompt generator model
pipe = pipeline(
    "text-generation",
    model="Ar4ikov/gpt2-650k-stable-diffusion-prompt-generator",
    tokenizer="gpt2",
)

# Generate several candidate prompts for the same input
def generate_text(prompt):
    # max_length=77 matches the 77-token limit of the CLIP text encoder used by Stable Diffusion
    generated_texts = [pipe(prompt, max_length=77)[0]["generated_text"] for _ in range(5)]
    return generated_texts

# Create the Gradio interface: one input box, five output boxes
iface = gr.Interface(
    fn=generate_text,
    inputs=gr.Textbox(lines=5, label="Prompt"),
    # readonly/multiline are not gr.Textbox arguments; use interactive=False and lines instead
    outputs=[gr.Textbox(label=f"Output {i+1}", lines=3, interactive=False) for i in range(5)],
    title="AI Art Prompt Generator",
    description="This tool generates multiple outputs for a given prompt using the AI Art Prompt Generator model.",
    allow_flagging="never",  # allow_flagging expects "never"/"auto"/"manual", not a boolean
)

# Launch the interface
iface.launch()