Spaces:
Running
on
Zero
Running
on
Zero
Commit
·
7a21437
1
Parent(s):
859912a
chroma
Browse files
app.py
CHANGED
@@ -1,14 +1,16 @@
|
|
1 |
import spaces
|
2 |
import torch
|
3 |
import gradio as gr
|
|
|
4 |
from diffusers import FluxPipeline
|
5 |
|
6 |
# Initialize model outside the function
|
7 |
device = "cuda" if torch.cuda.is_available() else "cpu"
|
8 |
dtype = torch.bfloat16
|
9 |
file_url = "https://huggingface.co/lodestones/Chroma/blob/main/chroma-unlocked-v31.safetensors"
|
|
|
10 |
|
11 |
-
flux_pipeline = FluxPipeline.from_single_file(file_url, torch_dtype=dtype)
|
12 |
flux_pipeline.to(device)
|
13 |
|
14 |
@spaces.GPU()
|
|
|
1 |
# Module-level setup for a Hugging Face Space running the Chroma (FLUX-based)
# text-to-image model. The pipeline is built once at import time so the
# @spaces.GPU()-decorated handler defined later reuses it across requests.
import spaces  # provides the @spaces.GPU decorator used further down in this file
import torch
import gradio as gr
import os
from diffusers import FluxPipeline

# Initialize model outside the function
# Prefer CUDA when available; fall back to CPU otherwise.
device = "cuda" if torch.cuda.is_available() else "cpu"
# bfloat16 halves memory vs float32; NOTE(review): assumes the target GPU
# supports bf16 — confirm for the Space's hardware tier.
dtype = torch.bfloat16
# Single-file checkpoint for Chroma v31.
# NOTE(review): this is a hub "/blob/" page URL, not a "/resolve/" raw URL —
# diffusers' from_single_file is expected to resolve hub blob URLs; confirm.
file_url = "https://huggingface.co/lodestones/Chroma/blob/main/chroma-unlocked-v31.safetensors"
# Read the access token from the environment; os.getenv returns None when the
# variable is unset, in which case the download proceeds unauthenticated.
huggingface_token = os.getenv("HUGGINGFACE_TOKEN")

# Build the pipeline from the single safetensors checkpoint, then move its
# weights to the selected device.
flux_pipeline = FluxPipeline.from_single_file(file_url, torch_dtype=dtype, token=huggingface_token)
flux_pipeline.to(device)
|
15 |
|
16 |
@spaces.GPU()
|