frogleo committed on
Commit 6aefd85 · 1 Parent(s): 781a759

Start adding the model

Files changed (3)
  1. __pycache__/utils.cpython-310.pyc +0 -0
  2. app.py +17 -16
  3. utils.py +10 -1
__pycache__/utils.cpython-310.pyc CHANGED
Binary files a/__pycache__/utils.cpython-310.pyc and b/__pycache__/utils.cpython-310.pyc differ
 
app.py CHANGED
@@ -28,21 +28,21 @@ device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
 logger.info(f"Using device: {device}")
 
 # Model initialization
-# if torch.cuda.is_available():
-#     try:
-#         logger.info("Loading VAE and pipeline...")
-#         vae = AutoencoderKL.from_pretrained(
-#             "madebyollin/sdxl-vae-fp16-fix",
-#             torch_dtype=torch.float16,
-#         )
-#         pipe = utils.load_pipeline("cagliostrolab/animagine-xl-4.0", device, vae=vae)
-#         logger.info("Pipeline loaded successfully on GPU!")
-#     except Exception as e:
-#         logger.error(f"Error loading VAE, falling back to default: {e}")
-#         pipe = utils.load_pipeline("cagliostrolab/animagine-xl-4.0", device)
-# else:
-#     logger.warning("CUDA not available, running on CPU")
-#     pipe = None
+if torch.cuda.is_available():
+    try:
+        logger.info("Loading VAE and pipeline...")
+        vae = AutoencoderKL.from_pretrained(
+            "madebyollin/sdxl-vae-fp16-fix",
+            torch_dtype=torch.float16,
+        )
+        pipe = utils.load_pipeline("cagliostrolab/animagine-xl-4.0", device, vae=vae)
+        logger.info("Pipeline loaded successfully on GPU!")
+    except Exception as e:
+        logger.error(f"Error loading VAE, falling back to default: {e}")
+        pipe = utils.load_pipeline("cagliostrolab/animagine-xl-4.0", device)
+else:
+    logger.warning("CUDA not available, running on CPU")
+    pipe = None
 
 
 
@@ -211,4 +211,5 @@ with gr.Blocks(css=custom_css).queue() as demo:
         outputs=[result, seed],
     )
 
-demo.launch()
+if __name__ == "__main__":
+    demo.queue(max_size=20).launch()
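The uncommented block calls utils.load_pipeline, which is not part of this diff. A minimal sketch of what such a helper might look like, assuming it wraps diffusers' StableDiffusionXLPipeline (the helper's real signature and options may differ):

# Hypothetical sketch of utils.load_pipeline -- the actual function is not shown in this commit.
import torch
from diffusers import AutoencoderKL, StableDiffusionXLPipeline

def load_pipeline(model_name: str, device: torch.device, vae: AutoencoderKL = None):
    """Load an SDXL pipeline, optionally with a custom fp16-safe VAE."""
    kwargs = {"torch_dtype": torch.float16, "use_safetensors": True}
    if vae is not None:
        kwargs["vae"] = vae
    pipe = StableDiffusionXLPipeline.from_pretrained(model_name, **kwargs)
    return pipe.to(device)

Note the design choice in the diff itself: if loading the madebyollin/sdxl-vae-fp16-fix VAE fails, the except branch still builds the pipeline without passing vae, so the app falls back to the checkpoint's bundled VAE instead of aborting.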
utils.py CHANGED
@@ -1,5 +1,14 @@
+import gc
+import os
+import random
+import numpy as np
+import json
 import torch
-from typing import Optional, Any
+import uuid
+from PIL import Image, PngImagePlugin
+from datetime import datetime
+from dataclasses import dataclass
+from typing import Callable, Dict, Optional, Tuple, Any, List
 from diffusers import (
     DDIMScheduler,
     DPMSolverMultistepScheduler,
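The new utils.py imports (uuid, json, datetime, PIL.PngImagePlugin) are the kind typically used to save generated images together with their generation parameters. A purely illustrative sketch of such a helper, assuming that intent; the function name save_image and the metadata key are assumptions, since the corresponding code is not included in this diff:

# Hypothetical helper showing how the new utils.py imports might be used.
# Nothing below is taken from the commit itself.
import json
import os
import uuid
from datetime import datetime
from PIL import Image, PngImagePlugin

def save_image(image: Image.Image, metadata: dict, output_dir: str = "outputs") -> str:
    """Save a generated image as PNG with its parameters embedded as a text chunk."""
    os.makedirs(output_dir, exist_ok=True)
    filename = f"{datetime.now():%Y%m%d_%H%M%S}_{uuid.uuid4().hex[:8]}.png"
    path = os.path.join(output_dir, filename)
    info = PngImagePlugin.PngInfo()
    info.add_text("parameters", json.dumps(metadata))
    image.save(path, pnginfo=info)
    return path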