frogleo committed
Commit 92ba34d · 1 Parent(s): bfedece

Optimize progress display

Files changed (2)
  1. __pycache__/config.cpython-310.pyc +0 -0
  2. app.py +21 -6
__pycache__/config.cpython-310.pyc CHANGED
Binary files a/__pycache__/config.cpython-310.pyc and b/__pycache__/config.cpython-310.pyc differ
 
app.py CHANGED
@@ -93,6 +93,7 @@ def validate_dimensions(width: int, height: int) -> None:
 
 
 
+progress=gr.Progress()
 
 @spaces.GPU
 def generate(
@@ -106,17 +107,27 @@ def generate(
     seed: int,
     randomize_seed: bool,
     guidance_scale: float,
-    num_inference_steps: int,
-    progress:gr.Progress=gr.Progress(track_tqdm=True),
+    num_inference_steps: int
 ):
+    progress(0,desc="Starting")
+
     if randomize_seed:
         seed = random.randint(0, MAX_SEED)
 
     """Generate images based on the given parameters."""
-    start_time = time.time()
     upscaler_pipe = None
     backup_scheduler = None
 
+    def callback1(pipe, step, timestep, callback_kwargs):
+        progress_value = 0.1 + ((step+1.0)/num_inference_steps)*(0.5/1.0)
+        progress(progress_value, desc=f"Image generating, {step + 1}/{num_inference_steps} steps")
+        return callback_kwargs
+
+    def callback2(pipe, step, timestep, callback_kwargs):
+        progress_value = 0.6 + ((step+1.0)/num_inference_steps)*(0.4/1.0)
+        progress(progress_value, desc=f"Image optimizing, {step + 1}/{num_inference_steps} steps")
+        return callback_kwargs
+
     try:
         # Memory management
         torch.cuda.empty_cache()
@@ -140,6 +151,7 @@ def generate(
 
         upscaler_pipe = StableDiffusionXLImg2ImgPipeline(**pipe.components)
 
+        progress(0.1,desc="Image generating")
         latents = pipe(
             prompt=prompt,
             negative_prompt=negative_prompt,
@@ -149,8 +161,9 @@ def generate(
             num_inference_steps=num_inference_steps,
             generator=generator,
             output_type="latent",
+            callback_on_step_end=callback1
         ).images
-
+        progress(0.6,desc="Image optimizing")
         upscaled_latents = utils.upscale(latents, "nearest-exact", upscale_by)
         images = upscaler_pipe(
             prompt=prompt,
@@ -161,9 +174,11 @@ def generate(
             strength=upscaler_strength,
             generator=generator,
             output_type="pil",
+            callback_on_step_end=callback2
         ).images
-        return images[0]
-
+        out_img = images[0]
+        progress(1,desc="Complete")
+        return out_img
     except GenerationError as e:
         logger.warning(f"Generation validation error: {str(e)}")
         raise gr.Error(str(e))
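For context, a minimal sketch (not part of the commit) of the pattern this change relies on: diffusers pipelines accept a `callback_on_step_end` hook with the signature `(pipe, step, timestep, callback_kwargs)`, and the commit maps the base SDXL pass onto roughly the 0.1–0.6 range of the progress bar and the img2img upscale pass onto 0.6–1.0. The `report` helper and `make_step_callback` factory below are hypothetical stand-ins for the `gr.Progress` tracker and the two hard-coded callbacks; pipeline objects and step counts are assumed.

```python
# Illustrative sketch only: map per-step callbacks from two diffusion passes
# onto a single 0-1 progress scale, as this commit does with callback1/callback2.

def report(fraction: float, desc: str = "") -> None:
    # Hypothetical stand-in for progress(fraction, desc=...); here we just print.
    print(f"{fraction:.2f} {desc}")

def make_step_callback(start: float, span: float, total_steps: int, label: str):
    """Return a callback that maps step (i+1)/total_steps into [start, start+span]."""
    def _callback(pipe, step, timestep, callback_kwargs):
        fraction = start + ((step + 1) / total_steps) * span
        report(fraction, desc=f"{label}, {step + 1}/{total_steps} steps")
        return callback_kwargs  # diffusers expects the kwargs dict back
    return _callback

# Usage against the commit's two passes (hypothetical pipeline objects):
# latents = pipe(..., callback_on_step_end=make_step_callback(0.1, 0.5, steps, "Image generating"))
# images = upscaler_pipe(..., callback_on_step_end=make_step_callback(0.6, 0.4, steps, "Image optimizing"))
```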