Update app.py
app.py CHANGED

@@ -19,9 +19,13 @@ if token:
 else:
     raise ValueError("Hugging Face token not found. Please set it as a repository secret in the Space settings.")
 
-# Load the Stable Diffusion 3.5 model with lower precision (float16)
+# Load the Stable Diffusion 3.5 model with lower precision (float16) if GPU is available
 model_id = "stabilityai/stable-diffusion-3.5-large"
-
+if device == "cuda":
+    pipe = StableDiffusion3Pipeline.from_pretrained(model_id, torch_dtype=torch.float16)  # Use float16 precision
+else:
+    pipe = StableDiffusion3Pipeline.from_pretrained(model_id)  # Default precision for CPU
+
 pipe.to(device)  # Ensuring the model is on the correct device (GPU or CPU)
 
 # Define the path to the LoRA model
@@ -32,10 +36,17 @@ def load_lora_model(pipe, lora_model_path):
     # Load the LoRA weights
     lora_weights = torch.load(lora_model_path, map_location=device)  # Load LoRA model to the correct device
 
+    # Print available attributes of the model to check access to `unet` (optional)
+    print(dir(pipe))  # This will list all attributes and methods of the `pipe` object
+
     # Apply weights to the UNet submodule
-
-
-
+    try:
+        for name, param in pipe.unet.named_parameters():  # Accessing unet parameters
+            if name in lora_weights:
+                param.data += lora_weights[name]
+    except AttributeError:
+        print("The model doesn't have 'unet' attributes. Please check the model structure.")
+        # Add alternative handling or exit
 
     return pipe
 
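
For context, a minimal usage sketch of the loading path added in the first hunk, assuming `device` is derived from `torch.cuda.is_available()` earlier in app.py and the token check has already passed; the prompt, step count, guidance scale, and output filename below are illustrative and not part of the commit.

import torch
from diffusers import StableDiffusion3Pipeline

device = "cuda" if torch.cuda.is_available() else "cpu"
model_id = "stabilityai/stable-diffusion-3.5-large"

# Mirror of the commit's loading logic; a gated repo may additionally need
# token=... passed to from_pretrained, depending on how login is handled.
if device == "cuda":
    pipe = StableDiffusion3Pipeline.from_pretrained(model_id, torch_dtype=torch.float16)  # half precision on GPU
else:
    pipe = StableDiffusion3Pipeline.from_pretrained(model_id)  # default precision on CPU
pipe.to(device)

# Smoke test: generating one image confirms the pipeline actually runs on `device`.
image = pipe("a watercolor fox in a forest", num_inference_steps=28, guidance_scale=7.0).images[0]
image.save("sample.png")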
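One caveat on the second hunk: in diffusers, StableDiffusion3Pipeline exposes its denoiser as pipe.transformer (SD3 replaces the UNet with a diffusion transformer), so the pipe.unet access is expected to land in the AttributeError branch. Below is a hedged alternative sketch, not the repository's tested code path; it assumes the checkpoint at lora_model_path is either a diffusers-compatible LoRA adapter or a plain state dict of full-size additive deltas keyed by parameter name, neither of which the commit confirms.

import torch

device = "cuda" if torch.cuda.is_available() else "cpu"

def load_lora_model(pipe, lora_model_path):
    try:
        # Preferred path: let diffusers inject the adapter (needs an SD3-compatible
        # LoRA checkpoint and a diffusers release with SD3 LoRA support).
        pipe.load_lora_weights(lora_model_path)
        return pipe
    except Exception as exc:  # broad catch, acceptable for a sketch
        print(f"load_lora_weights failed ({exc}); falling back to a manual merge.")

    lora_weights = torch.load(lora_model_path, map_location=device)
    denoiser = getattr(pipe, "transformer", None)  # SD3 pipelines
    if denoiser is None:
        denoiser = getattr(pipe, "unet", None)  # older UNet-based pipelines
    if denoiser is None:
        raise RuntimeError("Pipeline exposes neither 'transformer' nor 'unet' to patch.")

    with torch.no_grad():
        for name, param in denoiser.named_parameters():
            if name in lora_weights:
                # Assumes the file stores full-size additive deltas keyed by parameter name.
                param.add_(lora_weights[name].to(param.device, param.dtype))
    return pipe

The load_lora_weights route also leaves the base weights untouched, whereas the manual in-place merge cannot be undone without reloading the model.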