mfarre HF staff committed on
Commit
6a9b659
·
1 Parent(s): 8c0657c
Files changed (1) hide show
  1. app.py +3 -4
app.py CHANGED
@@ -8,7 +8,6 @@ from pathlib import Path
8
  from transformers import AutoProcessor, AutoModelForVision2Seq
9
  import subprocess
10
  import logging
11
- subprocess.run('pip install flash-attn --no-build-isolation', env={'FLASH_ATTENTION_SKIP_CUDA_BUILD': "TRUE"}, shell=True)
12
 
13
  logging.basicConfig(level=logging.INFO)
14
  logger = logging.getLogger(__name__)
@@ -403,10 +402,10 @@ def create_ui(examples_path: str, model_path: str):
403
  return app
404
 
405
  if __name__ == "__main__":
 
 
406
  # Initialize CUDA
407
  device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
408
 
409
- MODEL_PATH = os.getenv("MODEL_PATH", "HuggingFaceTB/SmolVLM2-2.2B-Instruct")
410
-
411
- app = create_ui("video_spec.json", MODEL_PATH)
412
  app.launch()
 
8
  from transformers import AutoProcessor, AutoModelForVision2Seq
9
  import subprocess
10
  import logging
 
11
 
12
  logging.basicConfig(level=logging.INFO)
13
  logger = logging.getLogger(__name__)
 
402
  return app
403
 
404
  if __name__ == "__main__":
405
+ subprocess.run('pip install flash-attn --no-build-isolation', env={'FLASH_ATTENTION_SKIP_CUDA_BUILD': "TRUE"}, shell=True)
406
+
407
  # Initialize CUDA
408
  device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
409
 
410
+ app = create_ui("video_spec.json", "HuggingFaceTB/SmolVLM2-2.2B-Instruct")
 
 
411
  app.launch()