import gradio as gr
from huggingface_hub import InferenceClient


# Define the inference function
def generate_image(prompt, seed, denoise):
    client = InferenceClient(model="tryonlabs/FLUX.1-dev-LoRA-Lehenga-Generator")
    # Assuming the model accepts these parameters; adjust based on the actual API
    image = client.text_to_image(
        prompt=prompt,
        seed=int(seed) if seed >= 0 else None,  # Use -1 for a random seed
        denoise=denoise,  # Denoising strength, typically between 0 and 1
    )
    return image

# Create the Gradio interface with advanced options
interface = gr.Interface(
    fn=generate_image,
    inputs=[
        gr.Textbox(label="Prompt", placeholder="Enter your prompt here..."),
        gr.Slider(label="Seed", minimum=-1, maximum=1000000, step=1, value=-1, info="Set to -1 for random seed"),
        gr.Slider(label="Denoise Strength", minimum=0.0, maximum=1.0, step=0.01, value=0.8, info="Controls noise reduction (0 to 1)"),
    ],
    outputs=gr.Image(label="Generated Lehenga Image"),
    title="FLUX.1-dev Lehenga Generator",
    description="Generate custom Lehenga designs using FLUX.1-dev with LoRA. Adjust seed and denoise for control.",
)

# Launch the interface
interface.launch()