|
def install_packages():
    """Install build tooling, torch + CUDA libs, flash-attn, and xformers.

    Idempotent: each heavyweight package group is only installed when its
    module cannot already be imported.  All pip invocations use the current
    interpreter (``sys.executable``) so packages land in this environment.

    Raises:
        subprocess.CalledProcessError: if any pip invocation fails
            (``check=True`` on every call).
    """
    import importlib
    import os
    import subprocess
    import sys

    def _pip(*args, env=None):
        # List-form argv with shell=False: robust against spaces in
        # sys.executable and avoids shell-injection pitfalls.
        subprocess.run([sys.executable, "-m", "pip", *args], env=env, check=True)

    def _is_package_available(name) -> bool:
        """Return True if module *name* is importable in this process."""
        # Pick up distributions installed earlier in this same run.
        importlib.invalidate_caches()
        try:
            importlib.import_module(name)
            return True
        except ImportError:  # ModuleNotFoundError is a subclass
            return False

    _pip("install", "--upgrade", "pip")
    _pip("install", "--upgrade", "ninja", "wheel", "setuptools", "packaging")

    # BUG FIX: this step installs torch + CUDA libraries, but the original
    # gated it on "ninja" — which the upgrade line above has just installed,
    # so torch was never installed in a fresh environment.  Gate on torch.
    if not _is_package_available("torch"):
        _pip(
            "install",
            "ninja",
            "nvidia-cudnn-cu12==9.1.0.70",
            "nvidia-cublas-cu12==12.4.5.8",
            "torch==2.5.1",
            "--extra-index-url",
            "https://download.pytorch.org/whl/cu124",
        )

    if not _is_package_available("flash_attn"):
        # BUG FIX: the distribution providing the ``flash_attn`` module is
        # ``flash-attn`` on PyPI; the original installed ``flash-attention``,
        # a different project, so this guard could never be satisfied.
        # MAX_JOBS=1 caps parallel compilation to limit build memory; merge
        # with os.environ so pip keeps PATH/CUDA vars (the original passed a
        # bare one-key dict, wiping the inherited environment).
        _pip(
            "install", "-v", "-U", "flash-attn", "--no-build-isolation",
            env={**os.environ, "MAX_JOBS": "1"},
        )

    if not _is_package_available("xformers"):
        _pip(
            "install", "-v", "-U",
            "xformers",
            "nvidia-cudnn-cu12==9.1.0.70",
            "nvidia-cublas-cu12==12.4.5.8",
            "torch==2.5.1",
            "--extra-index-url",
            "https://download.pytorch.org/whl/cu124",
        )
|
|