ford442 committed
Commit 7354f4e · verified · 1 parent: 173c68f

Update app.py

Files changed (1): app.py (+56 -16)
app.py CHANGED
@@ -158,19 +158,7 @@ def upload_to_ftp(filename):
     except Exception as e:
         print(f"FTP upload error: {e}")
 
-def apply_style(style_name: str, positive: str, negative: str = "") -> Tuple[str, str]:
-    if style_name in styles:
-        p, n = styles.get(style_name, styles[DEFAULT_STYLE_NAME])
-    else:
-        p, n = styles[DEFAULT_STYLE_NAME]
-    if not negative:
-        negative = ""
-    return p.replace("{prompt}", positive), n + negative
-
-def save_image(img):
-    unique_name = str(uuid.uuid4()) + ".png"
-    img.save(unique_name,optimize=False,compress_level=0)
-    return unique_name
+
 
 def uploadNote(prompt,num_inference_steps,guidance_scale,timestamp):
     filename= f'rv_C_{timestamp}.txt'
@@ -187,6 +175,56 @@ def uploadNote(prompt,num_inference_steps,guidance_scale,timestamp):
     upload_to_ftp(filename)
 
 code = r'''
+
+def scheduler_swap_callback(pipeline, step_index, timestep, callback_kwargs):
+    # adjust the batch_size of prompt_embeds according to guidance_scale
+    if step_index == int(pipeline.num_timesteps * 0.1):
+        print("-- swapping scheduler --")
+        # pipeline.scheduler = euler_scheduler
+        torch.set_float32_matmul_precision("high")
+        # pipe.vae = vae_b
+        torch.backends.cudnn.allow_tf32 = True
+        torch.backends.cuda.matmul.allow_tf32 = True
+        torch.backends.cudnn.deterministic = True
+        torch.backends.cuda.preferred_blas_library="cublaslt"
+        #if step_index == int(pipeline.num_timesteps * 0.5):
+        #    torch.set_float32_matmul_precision("medium")
+        #callback_kwargs["latents"] = callback_kwargs["latents"].to(torch.float64)
+        #pipe.unet.to(torch.float64)
+        # pipe.guidance_scale=1.0
+        # pipe.scheduler.set_timesteps(num_inference_steps*.70)
+        # print(f"-- setting step {pipeline.num_timesteps * 0.1} --")
+        # pipeline.scheduler._step_index = pipeline.num_timesteps * 0.1
+    if step_index == int(pipeline.num_timesteps * 0.9):
+        torch.backends.cuda.preferred_blas_library="cublas"
+        torch.backends.cudnn.allow_tf32 = False
+        torch.backends.cuda.matmul.allow_tf32 = False
+        torch.set_float32_matmul_precision("highest")
+        #callback_kwargs["latents"] = callback_kwargs["latents"].to(torch.bfloat16)
+        #pipe.unet.to(torch.float64)
+        # pipe.vae = vae_a
+        # pipe.unet = unet_a
+        torch.backends.cudnn.deterministic = False
+        print("-- swapping scheduler --")
+        # pipeline.scheduler = heun_scheduler
+        #pipe.scheduler.set_timesteps(num_inference_steps*.70)
+        # print(f"-- setting step {pipeline.num_timesteps * 0.9} --")
+        # pipeline.scheduler._step_index = pipeline.num_timesteps * 0.9
+    return {"latents": callback_kwargs["latents"]}
+
+def upload_to_ftp(filename):
+    try:
+        transport = paramiko.Transport((FTP_HOST, 22))
+        destination_path=FTP_DIR+filename
+        transport.connect(username = FTP_USER, password = FTP_PASS)
+        sftp = paramiko.SFTPClient.from_transport(transport)
+        sftp.put(filename, destination_path)
+        sftp.close()
+        transport.close()
+        print(f"Uploaded {filename} to FTP server")
+    except Exception as e:
+        print(f"FTP upload error: {e}")
+
 def uploadNote(prompt,num_inference_steps,guidance_scale,timestamp):
     filename= f'rv_C_{timestamp}.txt'
     with open(filename, "w") as f:
@@ -200,6 +238,7 @@ def uploadNote(prompt,num_inference_steps,guidance_scale,timestamp):
         f.write(f"Model VAE: sdxl-vae-bf16\n")
         f.write(f"To cuda and bfloat \n")
     return filename
+
 '''
 
 pyx = cyper.inline(code)
@@ -229,20 +268,21 @@ def generate_30(
         "num_inference_steps": num_inference_steps,
        "generator": generator,
         "output_type": "pil",
-        "callback_on_step_end": scheduler_swap_callback
+        "callback_on_step_end": pyx.scheduler_swap_callback
     }
     if use_resolution_binning:
         options["use_resolution_binning"] = True
     images = []
     timestamp = datetime.datetime.now().strftime("%Y%m%d_%H%M%S")
     filename = pyx.uploadNote(prompt,num_inference_steps,guidance_scale,timestamp)
-    upload_to_ftp(filename)
+    #upload_to_ftp(filename)
+    pyx.upload_to_ftp(filename)
     #uploadNote(prompt,num_inference_steps,guidance_scale,timestamp)
     batch_options = options.copy()
    rv_image = pipe(**batch_options).images[0]
     sd_image_path = f"rv_C_{timestamp}.png"
     rv_image.save(sd_image_path,optimize=False,compress_level=0)
-    upload_to_ftp(sd_image_path)
+    pyx.upload_to_ftp(sd_image_path)
     unique_name = str(uuid.uuid4()) + ".png"
     os.symlink(sd_image_path, unique_name)
     return [unique_name]
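
A note on the pattern this commit leans on: pyx = cyper.inline(code) hands the code string to the cyper package, which compiles it (via Cython) into a module-like object, and the functions moved inside the string (scheduler_swap_callback, upload_to_ftp, uploadNote) are then called as attributes of pyx. A minimal sketch of that round trip, assuming only that cyper behaves as it is used in app.py; the add function is a made-up placeholder:

import cyper

code = r'''
def add(a, b):
    return a + b
'''

pyx = cyper.inline(code)  # compile the source string into an importable module
print(pyx.add(2, 3))      # prints 5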
 
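For context on the callback_on_step_end change: diffusers pipelines call the supplied function once per denoising step as callback(pipeline, step_index, timestep, callback_kwargs) and read the returned dict back into the loop, which is why scheduler_swap_callback ends with return {"latents": callback_kwargs["latents"]}. A minimal sketch of wiring such a callback into a pipeline call; the model id, prompt, and the TF32 toggles here are illustrative stand-ins, not values from this repo:

import torch
from diffusers import StableDiffusionXLPipeline

def swap_settings_callback(pipeline, step_index, timestep, callback_kwargs):
    # relax matmul precision early in the schedule, restore it near the end
    if step_index == int(pipeline.num_timesteps * 0.1):
        torch.backends.cuda.matmul.allow_tf32 = True
    if step_index == int(pipeline.num_timesteps * 0.9):
        torch.backends.cuda.matmul.allow_tf32 = False
    # tensors returned here replace the pipeline's working copies
    return {"latents": callback_kwargs["latents"]}

pipe = StableDiffusionXLPipeline.from_pretrained(
    "stabilityai/stable-diffusion-xl-base-1.0",  # illustrative model id
    torch_dtype=torch.bfloat16,
).to("cuda")

image = pipe(
    "a test prompt",
    num_inference_steps=30,
    callback_on_step_end=swap_settings_callback,
    callback_on_step_end_tensor_inputs=["latents"],
).images[0]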