FLUX.1-dev / app.py
Basaram's picture
Update app.py
003e84f verified
raw
history blame
724 Bytes
import os

from huggingface_hub import InferenceClient

# Initialize client with the Fal.AI provider. The API key is read from the
# FAL_AI_API_KEY environment variable when set, so the secret does not have to
# live in source control; the original placeholder is kept only as a fallback.
client = InferenceClient(
    provider="fal-ai",
    api_key=os.environ.get("FAL_AI_API_KEY", "your_fal_ai_api_key"),
)

# Text prompt for image generation
prompt = "Astronaut riding a horse"

# Use a public or your deployed model on Fal.AI
model_name = "black-forest-labs/FLUX.1-dev"  # Make sure this model is deployed on Fal and accessible

try:
    # Generate image from the text prompt via the Fal.AI provider.
    image = client.text_to_image(
        prompt=prompt,
        model=model_name,
    )
    # Display the image (if running in Jupyter/Colab or a desktop environment)
    image.show()
except Exception as e:  # broad catch is deliberate: surface auth/network/model errors to the user
    print(f"Error during inference: {e}")