xinjie.wang committed on
Commit
e76935a
·
1 Parent(s): afe0039
asset3d_gen/data/backup/backproject_v2 copy.py CHANGED
@@ -8,8 +8,10 @@ import numpy as np
8
  import nvdiffrast.torch as dr
9
  import torch
10
  import torch.nn.functional as F
11
- from torchvision.transforms import functional as tF
12
-
 
 
13
  import trimesh
14
  import xatlas
15
  from PIL import Image
 
8
  import nvdiffrast.torch as dr
9
  import torch
10
  import torch.nn.functional as F
11
+ try:
12
+ from torchvision.transforms import functional as tF
13
+ except ImportError as e:
14
+ tF = None
15
  import trimesh
16
  import xatlas
17
  from PIL import Image
requirements.txt CHANGED
@@ -46,7 +46,7 @@ https://github.com/Dao-AILab/flash-attention/releases/download/v2.6.3/flash_attn
46
 
47
 
48
  torch==2.4.0
49
- torchvision==0.16.0
50
  pytorch-lightning==2.4.0
51
  spconv-cu120==2.3.6
52
  xformers==0.0.27.post2
 
46
 
47
 
48
  torch==2.4.0
49
+ torchvision==0.19.0
50
  pytorch-lightning==2.4.0
51
  spconv-cu120==2.3.6
52
  xformers==0.0.27.post2