xinjie.wang committed on
Commit afe0039 · 1 Parent(s): f60a6cc
Files changed (1)
  1. requirements.txt +1 -1
requirements.txt CHANGED
@@ -46,7 +46,7 @@ https://github.com/Dao-AILab/flash-attention/releases/download/v2.6.3/flash_attn
 
 
 torch==2.4.0
-torchvision==0.19.0
+torchvision==0.16.0
 pytorch-lightning==2.4.0
 spconv-cu120==2.3.6
 xformers==0.0.27.post2
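
As a sanity check (a minimal sketch, assuming the packages install cleanly via `pip install -r requirements.txt` into the current environment), the pinned versions from this commit can be confirmed at runtime:

    # Print the installed versions to confirm they match the pins in requirements.txt.
    import torch
    import torchvision

    print("torch:", torch.__version__)              # pinned to 2.4.0
    print("torchvision:", torchvision.__version__)  # pinned to 0.16.0 in this commit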