fffiloni committed on
Commit
22d11eb
·
verified ·
1 Parent(s): 666f4a3

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +5 -2
app.py CHANGED
@@ -60,6 +60,7 @@ def start_over(gallery_state, loaded_model_setup):
60
  return gallery_state, None, None, gr.update(visible=False), loaded_model_setup
61
 
62
  def setup_model(prompt, model, seed, num_iterations, enable_hps, hps_w, enable_imagereward, imgrw_w, enable_pickscore, pcks_w, enable_clip, clip_w, learning_rate, progress=gr.Progress(track_tqdm=True)):
 
63
  if prompt is None or prompt == "":
64
  raise gr.Error("You forgot to provide a prompt !")
65
 
@@ -113,6 +114,8 @@ def generate_image(setup_args, num_iterations):
113
  torch.cuda.empty_cache() # Free up cached memory
114
  gc.collect()
115
 
 
 
116
  args = setup_args[0]
117
  trainer = setup_args[1]
118
  device = setup_args[2]
@@ -201,11 +204,11 @@ def generate_image(setup_args, num_iterations):
201
 
202
  except torch.cuda.OutOfMemoryError as e:
203
  print(f"Global CUDA Out of Memory Error: {e}")
204
- yield (None, "CUDA out of memory.", None)
205
  except RuntimeError as e:
206
  if 'out of memory' in str(e):
207
  print(f"Runtime Error: {e}")
208
- yield (None, "CUDA out of memory.", None)
209
  else:
210
  yield (None, f"An error occurred: {str(e)}", None)
211
  except Exception as e:
 
60
  return gallery_state, None, None, gr.update(visible=False), loaded_model_setup
61
 
62
  def setup_model(prompt, model, seed, num_iterations, enable_hps, hps_w, enable_imagereward, imgrw_w, enable_pickscore, pcks_w, enable_clip, clip_w, learning_rate, progress=gr.Progress(track_tqdm=True)):
63
+ gr.Info(f"Loading {model} model ...")
64
  if prompt is None or prompt == "":
65
  raise gr.Error("You forgot to provide a prompt !")
66
 
 
114
  torch.cuda.empty_cache() # Free up cached memory
115
  gc.collect()
116
 
117
+ gr.Info(f"Executing iterations task ...")
118
+
119
  args = setup_args[0]
120
  trainer = setup_args[1]
121
  device = setup_args[2]
 
204
 
205
  except torch.cuda.OutOfMemoryError as e:
206
  print(f"Global CUDA Out of Memory Error: {e}")
207
+ yield (None, f"{e}", None)
208
  except RuntimeError as e:
209
  if 'out of memory' in str(e):
210
  print(f"Runtime Error: {e}")
211
+ yield (None, f"{e}", None)
212
  else:
213
  yield (None, f"An error occurred: {str(e)}", None)
214
  except Exception as e: