radames committed on
Commit
409869d
1 Parent(s): 5df89d6

pin diffusers

server/pipelines/img2imgSDTurbo.py CHANGED
@@ -4,10 +4,6 @@ from diffusers import (
 )
 import torch
 
-try:
-    import intel_extension_for_pytorch as ipex  # type: ignore
-except:
-    pass
 
 from config import Args
 from pydantic import BaseModel, Field
@@ -106,36 +102,10 @@ class Pipeline:
         if args.pruna:
             # Create and smash your model
             smash_config = SmashConfig()
-            smash_config["cacher"] = "deepcache"
+            # smash_config["cacher"] = "deepcache"
             smash_config["compiler"] = "stable_fast"
             self.pipe = smash(model=self.pipe, smash_config=smash_config)
 
-        if args.sfast:
-            from sfast.compilers.stable_diffusion_pipeline_compiler import (
-                compile,
-                CompilationConfig,
-            )
-
-            print("\nRunning sfast compile\n")
-            from sfast.compilers.stable_diffusion_pipeline_compiler import (
-                compile,
-                CompilationConfig,
-            )
-
-            config = CompilationConfig.Default()
-            config.enable_xformers = True
-            config.enable_triton = True
-            config.enable_cuda_graph = True
-            self.pipe = compile(self.pipe, config=config)
-
-        if args.onediff:
-            print("\nRunning onediff compile\n")
-            from onediff.infer_compiler import oneflow_compile
-
-            self.pipe.unet = oneflow_compile(self.pipe.unet)
-            self.pipe.vae.encoder = oneflow_compile(self.pipe.vae.encoder)
-            self.pipe.vae.decoder = oneflow_compile(self.pipe.vae.decoder)
-
         self.pipe.set_progress_bar_config(disable=True)
         self.pipe.to(device=device, dtype=torch_dtype)
         # if device.type != "mps":
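For context, the only compile path this file keeps after the commit is the Pruna branch. Below is a minimal standalone sketch of that path, assuming the pruna package (SmashConfig, smash) exactly as used in the hunk above and loading SD-Turbo through diffusers' AutoPipelineForImage2Image; the checkpoint name, device, and dtype are illustrative assumptions, not part of this commit.

# Sketch only: replicates the surviving Pruna branch outside the Pipeline class.
import torch
from diffusers import AutoPipelineForImage2Image
from pruna import SmashConfig, smash  # assumed import path for the pruna package

# Assumed checkpoint; the pipeline file wires this up through its own config.
pipe = AutoPipelineForImage2Image.from_pretrained(
    "stabilityai/sd-turbo", torch_dtype=torch.float16
)

smash_config = SmashConfig()
# smash_config["cacher"] = "deepcache"  # disabled by this commit
smash_config["compiler"] = "stable_fast"
pipe = smash(model=pipe, smash_config=smash_config)

pipe.set_progress_bar_config(disable=True)
pipe.to(device="cuda", dtype=torch.float16)  # device/dtype are illustrative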
server/requirements.txt CHANGED
@@ -1,6 +1,6 @@
 # Use with: uv pip install --no-cache --system --index-strategy=unsafe-best-match -r requirements.txt
 numpy
-diffusers
+diffusers<=0.33.1
 llvmlite>=0.39.0
 numba>=0.56.0
 transformers
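The requirements change caps diffusers at 0.33.1. A hypothetical startup guard (not part of this commit) that fails fast if an environment drifts past the pin, assuming the packaging library is installed:

# Hypothetical check, not in the repo: enforce the diffusers<=0.33.1 pin at runtime.
import diffusers
from packaging.version import Version

if Version(diffusers.__version__) > Version("0.33.1"):
    raise RuntimeError(
        f"diffusers {diffusers.__version__} exceeds the pinned 0.33.1; "
        "reinstall with: uv pip install -r requirements.txt"
    )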