from huggingface_hub import InferenceClient

# Initialize the client to route requests through the Fal.AI inference provider
client = InferenceClient(
    provider="fal-ai",
    api_key="your_fal_ai_api_key",  # Replace with your actual Fal.AI API key
)

# Text prompt for image generation
prompt = "Astronaut riding a horse"

# Hub model ID to generate with; it must be available through the Fal.AI provider
model_name = "black-forest-labs/FLUX.1-dev"

try:
    # Generate image
    image = client.text_to_image(
        prompt=prompt,
        model=model_name,
    )
    # text_to_image returns a PIL.Image.Image; open it in the system's default image viewer
    image.show()

except Exception as e:
    print(f"Error during inference: {e}")