Update app.py
app.py CHANGED
@@ -1,18 +1,18 @@
 import torch
+import zerogpu  # Import ZeroGPU
 from diffusers import StableDiffusion3Pipeline
 from huggingface_hub import login
 import os
 import gradio as gr
-import zerogpu
-print(zerogpu.__version__)
 
-#
+# Automatically select the available device (GPU if available, otherwise CPU)
+device = zerogpu.select_device()  # This will automatically choose 'cuda' or 'cpu'
+
+# Check and print if the selected device is GPU or CPU
 if torch.cuda.is_available():
-
-    print("GPU is available")
+    print(f"Using GPU: {torch.cuda.get_device_name()}")
 else:
-
-    print("GPU is not available, using CPU")
+    print("Using CPU")
 
 # Retrieve the token from the environment variable
 token = os.getenv("HF_TOKEN")  # Hugging Face token from the secret
@@ -72,3 +72,4 @@ iface = gr.Interface(
     outputs="image"
 )
 iface.launch()
+
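For context, below is a rough sketch of how the device selected in this commit might plug into the rest of app.py. Only lines 1-18 and 72-75 are shown in the diff, so the pipeline-loading code, the generate function, and the model id are assumptions; the HF_TOKEN login, StableDiffusion3Pipeline, and the gr.Interface with outputs="image" come from the commit itself. The sketch also uses a plain-torch device check instead of zerogpu.select_device(), since that helper is only known from this diff.

import os

import gradio as gr
import torch
from diffusers import StableDiffusion3Pipeline
from huggingface_hub import login

# Authenticate with the token stored in the Space's secrets (as in the diff).
token = os.getenv("HF_TOKEN")
if token:
    login(token=token)

# Device selection: the commit calls zerogpu.select_device(); a plain-torch
# equivalent is used here as a fallback assumption.
device = "cuda" if torch.cuda.is_available() else "cpu"

# Load Stable Diffusion 3 and move it to the chosen device (model id assumed).
pipe = StableDiffusion3Pipeline.from_pretrained(
    "stabilityai/stable-diffusion-3-medium-diffusers",
    torch_dtype=torch.float16 if device == "cuda" else torch.float32,
)
pipe = pipe.to(device)

def generate(prompt: str):
    # Run one text-to-image generation and return the first image.
    return pipe(prompt).images[0]

# Minimal Gradio wiring matching the outputs="image" / iface.launch() lines
# visible at the end of the diff.
iface = gr.Interface(fn=generate, inputs="text", outputs="image")
iface.launch()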