Futuretop committed on
Commit
78839d9
·
verified ·
1 Parent(s): eb50505

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +5 -0
app.py CHANGED
@@ -28,6 +28,11 @@ hf_utils = importlib.import_module('transformers.utils')
28
  hf_utils.is_flash_attn_2_available = lambda *a, **k: False
29
  hf_utils.is_flash_attn_greater_or_equal_2_10 = lambda *a, **k: False
30
 
 
 
 
 
 
31
  REVISION = "ceaf371f01ef66192264811b390bccad475a4f02"
32
 
33
  # Florence-2 로드
 
28
  hf_utils.is_flash_attn_2_available = lambda *a, **k: False
29
  hf_utils.is_flash_attn_greater_or_equal_2_10 = lambda *a, **k: False
30
 
31
+ mask_utils = importlib.import_module("transformers.modeling_attn_mask_utils")
32
+ for fn in ("_prepare_4d_attention_mask_for_sdpa", "_prepare_4d_causal_attention_mask_for_sdpa"):
33
+ if not hasattr(mask_utils, fn):
34
+ setattr(mask_utils, fn, lambda *a, **k: None)
35
+
36
  REVISION = "ceaf371f01ef66192264811b390bccad475a4f02"
37
 
38
  # Florence-2 로드