import gradio as gr
from huggingface_hub import InferenceClient


# Inference function: generate an image from a text prompt via the
# Hugging Face Inference API using the Lehenga LoRA model.
def generate_image(prompt, seed):
    client = InferenceClient(model="tryonlabs/FLUX.1-dev-LoRA-Lehenga-Generator")
    image = client.text_to_image(
        prompt=prompt,
        seed=int(seed) if seed >= 0 else None,  # -1 means use a random seed
    )
    return image


# Gradio interface definition
interface = gr.Interface(
    fn=generate_image,
    inputs=[
        gr.Textbox(label="Prompt", placeholder="Enter your prompt here..."),
        gr.Slider(label="Seed", minimum=-1, maximum=1000000, step=1, value=-1, info="Set to -1 for a random seed"),
    ],
    outputs=gr.Image(label="Generated Lehenga Image"),
    title="FLUX.1-dev Lehenga Generator",
    description="Generate custom Lehenga designs using FLUX.1-dev with LoRA. Adjust the seed for reproducible results.",
)

# Launch the interface
interface.launch()
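
# Note (sketch, not part of the original app): when running outside a
# Hugging Face Space, the Inference API may require authentication.
# One way is to pass a token explicitly; this assumes an HF_TOKEN
# environment variable is set in your environment:
#
#   import os
#   client = InferenceClient(
#       model="tryonlabs/FLUX.1-dev-LoRA-Lehenga-Generator",
#       token=os.getenv("HF_TOKEN"),
#   )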