J-LAB committed (verified)
Commit f4872b1 · Parent: 7262503

Update app.py

Files changed (1): app.py (+1, -1)
app.py CHANGED
@@ -5,7 +5,7 @@ import spaces
 import io
 from PIL import Image
 import subprocess
-subprocess.run('pip install flash-attn --no-build-isolation', env={'FLASH_ATTENTION_SKIP_CUDA_BUILD': "TRUE"}, shell=True)
+subprocess.run(['pip', 'install', 'einops', 'flash_attn'], check=True)
 model_id = 'J-LAB/Florence_2_L_FluxiAI_Product_Caption'
 model = AutoModelForCausalLM.from_pretrained(model_id, trust_remote_code=True).to("cuda").eval()
 processor = AutoProcessor.from_pretrained(model_id, trust_remote_code=True)
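
Note on the change: the removed line shelled out to pip with FLASH_ATTENTION_SKIP_CUDA_BUILD set so flash-attn would skip compiling CUDA kernels at install time, while the added line uses the safer list form with check=True but drops that environment variable and the --no-build-isolation flag. Below is a minimal sketch of a variant that keeps both behaviors; it is an illustration only and is not part of this commit.

import os
import subprocess

# Sketch: list-form install (as in the new line) that also forwards the
# FLASH_ATTENTION_SKIP_CUDA_BUILD flag used by the removed shell command.
env = {**os.environ, 'FLASH_ATTENTION_SKIP_CUDA_BUILD': 'TRUE'}
subprocess.run(
    ['pip', 'install', 'einops', 'flash_attn', '--no-build-isolation'],
    env=env,
    check=True,  # raise CalledProcessError if the install fails
)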