torch>=2.4.0
torchvision>=0.19.0
opencv-python>=4.9.0.80
diffusers>=0.31.0
transformers==4.51.3
# transformers==4.46.3  # previously required by the llava-llama text encoder used by Hunyuan i2v, before the patch
tokenizers>=0.20.3
accelerate>=1.1.1
tqdm
imageio
easydict
ftfy
dashscope
imageio-ffmpeg
# flash_attn    
gradio==5.23.0   
numpy>=1.23.5,<2
einops
moviepy==1.0.3
mmgp==3.4.8
peft==0.14.0
mutagen
pydantic==2.10.6
decord
onnxruntime-gpu
rembg[gpu]==2.0.65
matplotlib
timm
segment-anything
omegaconf
hydra-core
librosa
loguru
sentencepiece
av
opencv-python
# rembg==2.0.65

torchsde==0.2.6

torchvision
opencv-contrib-python
safetensors
peft
decord
tqdm
imageio==2.31.1
imageio-ffmpeg==0.4.8
ffmpeg-python==0.2.0
spaces
devicetorch
psutil
basicsr
facexlib>=0.2.5
gfpgan>=1.3.5
realesrgan
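# Prebuilt flash-attention 2.5.9.post1 wheel (CUDA 11.8, Python 3.10, Linux x86_64), per the filename below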
https://github.com/Dao-AILab/flash-attention/releases/download/v2.5.9.post1/flash_attn-2.5.9.post1+cu118torch1.12cxx11abiFALSE-cp310-cp310-linux_x86_64.whl