Update app.py
app.py CHANGED
@@ -114,22 +114,7 @@ def setup_model(loaded_model_setup, prompt, model, seed, num_iterations, enable_
         loaded_model_setup[0].prompt = prompt
 
         return f"{model} model already loaded with the same configuration.", loaded_model_setup
-
-    # If the model has changed, unload the previous model to free GPU memory
-    if previous_args.model != args.model:
-        print(f"Unloading previous model: {previous_args.model} from GPU.")
-
-        # Unload previous model (trainer and pipe) from GPU
-        trainer = loaded_model_setup[1]  # The trainer is at position 1
-        if hasattr(trainer, 'model'):
-            # Move model to CPU first (optional, but ensures cleanup)
-            trainer.model.to('cpu')
-            del trainer.model  # Delete the model to free memory
-
-        # Clear GPU memory
-        torch.cuda.empty_cache()
-        gc.collect()
-
+
     # Attempt to set up the model
     try:
        # If other args differ, proceed with the setup
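For context, the block removed in this hunk followed the common PyTorch pattern for releasing a loaded model's GPU memory: move the weights to CPU, delete the reference, then clear the CUDA cache and run the garbage collector. A minimal sketch of that pattern, assuming a trainer object that exposes a .model attribute as in the removed code (the standalone helper name unload_model is hypothetical, not part of app.py):

import gc

import torch


def unload_model(trainer):
    """Release the GPU memory held by a previously loaded model."""
    if hasattr(trainer, 'model'):
        # Move weights to CPU first so their CUDA allocations can be freed.
        trainer.model.to('cpu')
        # Drop the reference to the model itself.
        del trainer.model

    # Release cached CUDA blocks and collect any dangling Python references.
    torch.cuda.empty_cache()
    gc.collect()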