rahul7star committed on
Commit
94c16a0
·
verified ·
1 Parent(s): ba89f83

Update requirements.txt

Browse files
Files changed (1) hide show
  1. requirements.txt +15 -25
requirements.txt CHANGED
@@ -1,8 +1,10 @@
1
- torch>=2.4.0
2
- torchvision>=0.19.0
 
 
3
  opencv-python>=4.9.0.80
4
- diffusers>=0.31.0
5
- transformers==4.51.3
6
  #transformers==4.46.3 # was needed by llamallava used by i2v hunyuan before patch
7
  tokenizers>=0.20.3
8
  accelerate>=1.1.1
@@ -12,13 +14,16 @@ easydict
12
  ftfy
13
  dashscope
14
  imageio-ffmpeg
15
- # flash_attn
16
- gradio==5.23.0
 
 
 
17
  numpy>=1.23.5,<2
18
  einops
19
  moviepy==1.0.3
20
- mmgp==3.5.6
21
- peft==0.15.0
22
  mutagen
23
  pydantic==2.10.6
24
  decord
@@ -29,23 +34,8 @@ timm
29
  segment-anything
30
  omegaconf
31
  hydra-core
32
- librosa==0.11.0
33
  loguru
34
  sentencepiece
35
  av
36
- opencv-python
37
- pygame>=2.1.0
38
- sounddevice>=0.4.0
39
- # rembg==2.0.65
40
- torchdiffeq >= 0.2.5
41
- tensordict >= 0.6.1
42
- open_clip_torch >= 2.29.0
43
- pyloudnorm
44
- misaki
45
- soundfile
46
- ffmpeg-python
47
- pyannote.audio
48
- pynvml
49
- huggingface_hub[hf_xet]
50
- # num2words
51
- # spacy
 
1
+ torch==2.6.0
2
+ torchvision==0.21.0
3
+ torchdata==0.10.1
4
+
5
  opencv-python>=4.9.0.80
6
+ diffusers
7
+ transformers
8
  #transformers==4.46.3 # was needed by llamallava used by i2v hunyuan before patch
9
  tokenizers>=0.20.3
10
  accelerate>=1.1.1
 
14
  ftfy
15
  dashscope
16
  imageio-ffmpeg
17
+
18
+ # flash_attn
19
+ flash-attn @ https://github.com/Dao-AILab/flash-attention/releases/download/v2.7.4.post1/flash_attn-2.7.4.post1+cu12torch2.6cxx11abiFALSE-cp310-cp310-linux_x86_64.whl
20
+
21
+ gradio==5.33.0
22
  numpy>=1.23.5,<2
23
  einops
24
  moviepy==1.0.3
25
+ mmgp==3.4.8
26
+ peft==0.14.0
27
  mutagen
28
  pydantic==2.10.6
29
  decord
 
34
  segment-anything
35
  omegaconf
36
  hydra-core
37
+ librosa
38
  loguru
39
  sentencepiece
40
  av
41
+ # rembg==2.0.65