ford442 committed
Commit f4b3773 · 1 Parent(s): 2d81970

Update app.py

Files changed (1)
app.py +1 -8
app.py CHANGED
@@ -92,7 +92,6 @@ def apply_style(style_name: str, positive: str, negative: str = "") -> Tuple[str
         p, n = styles.get(style_name, styles[DEFAULT_STYLE_NAME])
     else:
         p, n = styles[DEFAULT_STYLE_NAME]
-
     if not negative:
         negative = ""
     return p.replace("{prompt}", positive), n + negative
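
For context, apply_style looks up a named prompt template and merges it with the user's prompts; the hunk above only drops a stray blank line inside it. A minimal sketch of the pattern, assuming a styles dict that maps a style name to a (positive template, negative prompt) pair — the example entries and the DEFAULT_STYLE_NAME value below are illustrative, not taken from app.py:

from typing import Dict, Tuple

DEFAULT_STYLE_NAME = "Photo"  # illustrative default, not the one used in app.py
styles: Dict[str, Tuple[str, str]] = {
    "Photo": ("photograph of {prompt}, 8k, sharp focus", "cartoon, drawing, "),
    "Anime": ("anime artwork of {prompt}, vibrant colors", "photo, realistic, "),
}

def apply_style(style_name: str, positive: str, negative: str = "") -> Tuple[str, str]:
    # Unknown style names fall back to the default entry.
    p, n = styles.get(style_name, styles[DEFAULT_STYLE_NAME])
    if not negative:
        negative = ""
    # Substitute the user prompt into the template and append the user's negative prompt.
    return p.replace("{prompt}", positive), n + negative

print(apply_style("Photo", "a castle at dusk"))
# ('photograph of a castle at dusk, 8k, sharp focus', 'cartoon, drawing, ')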
@@ -101,12 +100,8 @@ def load_and_prepare_model(model_id):
     model_dtypes = {
         "ford442/RealVisXL_V5.0_BF16": torch.bfloat16,
     }
-
-    # Get the dtype based on the model_id
     dtype = model_dtypes.get(model_id, torch.bfloat16) # Default to float32 if not found
     vae = AutoencoderKL.from_pretrained("ford442/sdxl-vae-bf16", torch_dtype=torch.bfloat16).to('cuda')
-
-    # Load the pipeline with the determined dtype
     pipe = StableDiffusionXLPipeline.from_pretrained(
         model_id,
         torch_dtype=torch.bfloat16,
@@ -117,9 +112,6 @@ def load_and_prepare_model(model_id):
     )
     pipe.scheduler = EulerAncestralDiscreteScheduler.from_config(pipe.scheduler.config)
     pipe.to('cuda')
-    if USE_TORCH_COMPILE:
-        pipe.compile()
-
     return pipe
 
 # Preload and compile both models
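
The block removed above guarded pipe.compile() behind USE_TORCH_COMPILE; a diffusers pipeline is not an nn.Module, so pipe.compile() would most likely raise an AttributeError, which is presumably why it was dropped. A rough sketch of what the loader does after this commit, with the usual torch.compile alternative left as a commented-out assumption (passing vae= into from_pretrained and the compile call are assumptions — those lines are not visible in the hunks):

import torch
from diffusers import AutoencoderKL, StableDiffusionXLPipeline, EulerAncestralDiscreteScheduler

def load_and_prepare_model(model_id: str = "ford442/RealVisXL_V5.0_BF16"):
    # bf16 VAE paired with the bf16 pipeline weights, as in the diff.
    vae = AutoencoderKL.from_pretrained("ford442/sdxl-vae-bf16", torch_dtype=torch.bfloat16).to("cuda")
    pipe = StableDiffusionXLPipeline.from_pretrained(
        model_id,
        torch_dtype=torch.bfloat16,
        vae=vae,  # assumption: the lines hidden between the hunks pass the custom VAE
    )
    # Swap in the Euler Ancestral sampler, as the unchanged context lines show.
    pipe.scheduler = EulerAncestralDiscreteScheduler.from_config(pipe.scheduler.config)
    pipe.to("cuda")
    # Assumption: if compilation is still wanted, compile the UNet module rather than the pipeline:
    # pipe.unet = torch.compile(pipe.unet, mode="reduce-overhead", fullgraph=True)
    return pipe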
@@ -150,6 +142,7 @@ def randomize_seed_fn(seed: int, randomize_seed: bool) -> int:
         seed = random.randint(0, MAX_SEED)
     return seed
 
+@torch.inference_mode()
 @spaces.GPU(duration=60)
 def generate(
     model_choice: str,
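
The single added line wraps generate() in torch.inference_mode(), so no autograd state (gradients or tensor version counters) is recorded while images are generated, which saves memory and a little overhead. A minimal sketch of the decorator usage with a stand-in body — the real generate() in app.py takes many more parameters and runs the diffusion pipeline:

import torch

@torch.inference_mode()  # everything inside runs without autograd bookkeeping
def generate(model_choice: str, prompt: str, seed: int = 0):
    torch.manual_seed(seed)
    latents = torch.randn(1, 4, 128, 128)  # stand-in for the pipeline call
    assert not latents.requires_grad       # tensors made under inference_mode never track gradients
    return latents

generate("ford442/RealVisXL_V5.0_BF16", "a castle at dusk")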
 