Dakerqi committed
Commit b071632 · verified · 1 Parent(s): efd52f1

Update app.py

Files changed (1)
  1. app.py +0 −8
app.py CHANGED
@@ -1,11 +1,3 @@
-import subprocess
-
-whl_url = "https://github.com/Dao-AILab/flash-attention/releases/download/v2.7.4.post1/flash_attn-2.7.4.post1+cu12torch2.2cxx11abiTRUE-cp311-cp311-linux_x86_64.whl"
-
-subprocess.run(f"pip install {whl_url}", shell=True, check=True)
-
-print("Flash Attention installed successfully!")
-
 import argparse
 import os
 #os.environ['CUDA_VISIBLE_DEVICES'] = '7'
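
The eight removed lines installed a prebuilt flash-attn wheel at app startup. For reference, a minimal sketch of that runtime-install pattern, using the wheel URL from the removed code; invoking pip through sys.executable with an argument list instead of shell=True is a stylistic assumption, not what the original app.py did:

# Sketch of the startup-time wheel install this commit removes.
import subprocess
import sys

# Wheel URL taken verbatim from the removed code.
WHL_URL = (
    "https://github.com/Dao-AILab/flash-attention/releases/download/"
    "v2.7.4.post1/flash_attn-2.7.4.post1+cu12torch2.2cxx11abiTRUE-"
    "cp311-cp311-linux_x86_64.whl"
)

# Run pip through the current interpreter so the wheel lands in the same
# environment; check=True raises CalledProcessError if the install fails.
subprocess.run([sys.executable, "-m", "pip", "install", WHL_URL], check=True)
print("Flash Attention installed successfully!")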