ford442 committed
Commit ddfbad8 · verified · 1 Parent(s): 1026b35

Update app.py

Files changed (1)
  1. app.py +2 -1
app.py CHANGED
@@ -71,11 +71,11 @@ refiner = StableDiffusionXLImg2ImgPipeline.from_pretrained("ford442/stable-diffu
 #refiner = StableDiffusionXLImg2ImgPipeline.from_pretrained("stabilityai/stable-diffusion-xl-refiner-1.0", vae=vae, torch_dtype=torch.float32, requires_aesthetics_score=True, device_map='balanced')
 
 #refiner.enable_model_cpu_offload()
+refiner.scheduler = EulerAncestralDiscreteScheduler.from_config(refiner.scheduler.config, beta_schedule="scaled_linear")
 
 #refiner.scheduler.config.requires_aesthetics_score=False
 refiner.to(device)
 #refiner = torch.compile(refiner)
-refiner.scheduler = EulerAncestralDiscreteScheduler.from_config(refiner.scheduler.config, beta_schedule="scaled_linear")
 
 tokenizer = AutoTokenizer.from_pretrained(checkpoint, add_prefix_space=False, device_map='balanced')
 tokenizer.tokenizer_legacy=False
@@ -168,6 +168,7 @@ def infer(
     image_path = f"sd35m_{seed}.png"
     sd_image.save(image_path,optimize=False,compress_level=0)
     upload_to_ftp(image_path)
+    refiner.scheduler.set_timesteps(num_inference_steps,device)
     refine = refiner(
         prompt=f"{prompt}, high quality masterpiece, complex details",
         negative_prompt = negative_prompt,
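
Taken together, the two hunks move the Euler-ancestral scheduler swap ahead of refiner.to(device) and add a per-call refiner.scheduler.set_timesteps(...) inside infer() right before the refiner pipeline is invoked. The following is a minimal, illustrative sketch of that pattern rather than the actual app.py: the public stabilityai refiner id, the float16 dtype, the refine_image() helper, and its default of 25 steps are stand-ins (app.py loads a "ford442/stable-diffu..." checkpoint, truncated in the hunk header, with vae=vae, torch.float32 and device_map='balanced', and does this work inside infer()).

import torch
from diffusers import StableDiffusionXLImg2ImgPipeline, EulerAncestralDiscreteScheduler

device = "cuda" if torch.cuda.is_available() else "cpu"

# Illustrative load; see the commented-out line in the first hunk for the
# arguments app.py actually passes (vae, float32, device_map='balanced').
refiner = StableDiffusionXLImg2ImgPipeline.from_pretrained(
    "stabilityai/stable-diffusion-xl-refiner-1.0",
    torch_dtype=torch.float16,
)

# First hunk: rebuild the scheduler from its own config with a scaled_linear
# beta schedule, before the pipeline is moved to the device.
refiner.scheduler = EulerAncestralDiscreteScheduler.from_config(
    refiner.scheduler.config, beta_schedule="scaled_linear"
)
refiner.to(device)

def refine_image(prompt, negative_prompt, sd_image, num_inference_steps=25):
    # Second hunk: re-seed the scheduler's timestep table on the target device
    # for this call, matching the step count handed to the pipeline.
    refiner.scheduler.set_timesteps(num_inference_steps, device)
    return refiner(
        prompt=f"{prompt}, high quality masterpiece, complex details",
        negative_prompt=negative_prompt,
        image=sd_image,
        num_inference_steps=num_inference_steps,
    ).images[0]

Passing device to set_timesteps places the scheduler's freshly built timestep and sigma tensors on the same device as the pipeline for the chosen step count, and from_config(..., beta_schedule="scaled_linear") keeps the existing scheduler config while overriding only the beta schedule.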