Update app.py
app.py CHANGED
@@ -37,7 +37,7 @@ class VideoProcessor:
         self.batch_size = 4 if torch.cuda.is_available() else 2
 
     def load_models(self):
-
+        """Load models with optimizations and proper configurations"""
         print("Loading CLIP model...")
         self.clip_model = CLIPModel.from_pretrained(
             "openai/clip-vit-base-patch32",
@@ -48,18 +48,16 @@ class VideoProcessor:
             "openai/clip-vit-base-patch32",
             cache_dir="./model_cache"
         )
-
+
         print("Loading BLIP2 model...")
         model_name = "Salesforce/blip2-opt-2.7b"
 
-        # Initialize BLIP2 processor
+        # Initialize BLIP2 processor without config modifications
         self.blip_processor = Blip2Processor.from_pretrained(
             model_name,
             cache_dir="./model_cache"
         )
-
-        self.blip_processor.config.processor_class = "Blip2Processor"
-
+
         # Load BLIP2 model with optimizations
         self.blip_model = Blip2ForConditionalGeneration.from_pretrained(
             model_name,
@@ -68,7 +66,7 @@ class VideoProcessor:
             cache_dir="./model_cache",
             low_cpu_mem_usage=True
         ).to(self.device)
-
+
         # Set models to evaluation mode
         self.clip_model.eval()
         self.blip_model.eval()