Update app.py
app.py
CHANGED
@@ -291,7 +291,7 @@ class ModelManager:
         # Process selected models
         for model_key in selected_models:
             if self.available_models[model_key]['selected']:  # Ensure model is selected
-                batch_results[model_key] = await self.available_models[model_key]['process'](
+                batch_results[model_key] = await self.available_models[model_key]['process'](batch_images, log_events)  # Removed 'self' here
             else:
                 batch_results[model_key] = [None] * len(batch_images)

@@ -348,7 +348,7 @@ class ModelManager:
         optimal = max(1, batch_size // 2)
         if optimal > 64:
             optimal = 64
-            print("
+            print(f"Optimal batch size determined: {optimal}")
         print(f"Optimal batch size determined: {optimal}")
         return optimal
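The first hunk completes a call that was previously left unfinished: each entry in self.available_models stores a 'process' callable, and the fixed line awaits it with only (batch_images, log_events), since a bound method already carries self (hence the "Removed 'self' here" comment). The following is a minimal sketch of how such a registry might be laid out; the method names, the run_batch wrapper, and the placeholder bodies are assumptions for illustration, not the actual app.py.

import asyncio

class ModelManager:
    def __init__(self):
        # Registry of models; 'process' holds a *bound* async method, so the call
        # site passes only the data arguments, never an explicit self.
        self.available_models = {
            "caption": {"selected": True, "process": self.process_caption},
            "tags": {"selected": False, "process": self.process_tags},
        }

    async def process_caption(self, batch_images, log_events):
        # Placeholder inference step, for illustration only.
        return [f"caption for {img}" for img in batch_images]

    async def process_tags(self, batch_images, log_events):
        return [["tag"] for _ in batch_images]

    async def run_batch(self, selected_models, batch_images, log_events):
        batch_results = {}
        # Process selected models
        for model_key in selected_models:
            if self.available_models[model_key]["selected"]:  # Ensure model is selected
                batch_results[model_key] = await self.available_models[model_key]["process"](
                    batch_images, log_events
                )
            else:
                batch_results[model_key] = [None] * len(batch_images)
        return batch_results

# Example usage:
# asyncio.run(ModelManager().run_batch(["caption", "tags"], ["img1.png"], []))

Because the method is looked up on the instance when the registry is built, Python binds self automatically; passing it again at the call site would shift batch_images into the wrong parameter.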
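The second hunk repairs a truncated print(" inside the batch-size heuristic: halve the requested size, keep it at least 1, and cap it at 64. As patched, the file ends up with two identical print calls in succession (the replacement on line 351 plus the existing one on line 352). The sketch below, which keeps a single print, shows what that helper plausibly looks like; the function name and signature are assumptions, since the diff only shows the body around lines 348-353.

def determine_optimal_batch_size(batch_size: int) -> int:
    # Halve the requested batch size, but never go below 1.
    optimal = max(1, batch_size // 2)
    # Cap the result so a single batch never exceeds 64 images.
    if optimal > 64:
        optimal = 64
    print(f"Optimal batch size determined: {optimal}")
    return optimal

# Example: determine_optimal_batch_size(200) prints
# "Optimal batch size determined: 64" and returns 64.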