youssef committed
Commit 11484b5 · 1 Parent(s): e927231

turn on flash attention again

Files changed (1)
  1. src/video_processor/processor.py +1 -1
src/video_processor/processor.py CHANGED
@@ -53,7 +53,7 @@ class VideoAnalyzer:
         self.model = AutoModelForImageTextToText.from_pretrained(
             self.model_path,
             torch_dtype=torch.bfloat16,
-            # _attn_implementation="flash_attention_2"
+            _attn_implementation="flash_attention_2"
         ).to(DEVICE)
         logger.info(f"Model loaded on device: {self.model.device}")
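For context, the re-enabled `_attn_implementation="flash_attention_2"` kwarg only takes effect when the flash-attn package is installed and a supported CUDA GPU is available; without them, `from_pretrained` will typically fail with an import error. Below is a minimal sketch of a guarded version of the same loading call. `pick_attn_kwargs` and `MODEL_PATH` are hypothetical names introduced for illustration; `DEVICE` mirrors the constant used in processor.py.

```python
import importlib.util

import torch
from transformers import AutoModelForImageTextToText

# Hypothetical constants for illustration; the repo's processor.py uses its own
# model path and DEVICE constant.
DEVICE = "cuda" if torch.cuda.is_available() else "cpu"
MODEL_PATH = "org/some-image-text-to-text-model"  # placeholder model id


def pick_attn_kwargs() -> dict:
    """Enable flash_attention_2 only when a CUDA GPU and the flash-attn package are present."""
    if torch.cuda.is_available() and importlib.util.find_spec("flash_attn") is not None:
        return {"_attn_implementation": "flash_attention_2"}  # kwarg name used in the commit
    return {}  # fall back to the transformers default attention implementation


model = AutoModelForImageTextToText.from_pretrained(
    MODEL_PATH,
    torch_dtype=torch.bfloat16,
    **pick_attn_kwargs(),
).to(DEVICE)
```

This keeps the commit's behavior on flash-attention-capable machines while still loading the model elsewhere, rather than hard-coding the backend as the diff does.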