ford442 committed on
Commit
e21b5a0
·
verified ·
1 Parent(s): 87a8f58

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +11 -11
app.py CHANGED
@@ -156,17 +156,17 @@ def infer(
156
  print('-- filtered prompt --')
157
  print(enhanced_prompt)
158
  if latent_file: # Check if a latent file is provided
159
- initial_latents = pipe.prepare_latents(
160
- batch_size=1,
161
- num_channels_latents=pipe.transformer.in_channels,
162
- height=pipe.transformer.config.sample_size[0],
163
- width=pipe.transformer.config.sample_size[1],
164
- dtype=pipe.transformer.dtype,
165
- device=pipe.device,
166
- generator=generator,
167
- )
168
  sd_image_a = torch.load(latent_file.name) # Load the latent
169
- initial_latents += sd_image_a
170
  #sd_image_b = pipe.vae.encode(sd_image_a.to(torch.bfloat16)).latent_dist.sample().mul_(0.18215)
171
  print("-- using latent file --")
172
  print('-- generating image --')
@@ -178,7 +178,7 @@ def infer(
178
  num_inference_steps=num_inference_steps,
179
  width=width,
180
  height=height,
181
- latents=initial_latents,
182
  generator=generator
183
  ).images[0]
184
  else:
 
156
  print('-- filtered prompt --')
157
  print(enhanced_prompt)
158
  if latent_file: # Check if a latent file is provided
159
+ # initial_latents = pipe.prepare_latents(
160
+ # batch_size=1,
161
+ # num_channels_latents=pipe.transformer.in_channels,
162
+ # height=pipe.transformer.config.sample_size[0],
163
+ # width=pipe.transformer.config.sample_size[1],
164
+ # dtype=pipe.transformer.dtype,
165
+ # device=pipe.device,
166
+ # generator=generator,
167
+ # )
168
  sd_image_a = torch.load(latent_file.name) # Load the latent
169
+ # initial_latents += sd_image_a
170
  #sd_image_b = pipe.vae.encode(sd_image_a.to(torch.bfloat16)).latent_dist.sample().mul_(0.18215)
171
  print("-- using latent file --")
172
  print('-- generating image --')
 
178
  num_inference_steps=num_inference_steps,
179
  width=width,
180
  height=height,
181
+ latents=sd_image_a,
182
  generator=generator
183
  ).images[0]
184
  else: