# requirements.txt
# NOTE(review): removed non-requirement scrape artifacts that preceded this
# header (a "File size" line, commit hashes, and a copied line-number gutter);
# pip would have rejected them as invalid requirement specifiers.
# Build tooling
wheel

# Core ML / training stack
accelerate==0.34.0
av==12.3.0
datasets==2.16.1
decord==0.6.0
hf_transfer==0.1.9
hjson==3.1.0
httpcore==0.17.3
huggingface-hub==0.28.1
hydra-core==1.3.2
idna==3.10
numpy==1.26.1
omegaconf==2.3.0
opencv-python==4.11.0.86
peft==0.4.0
wandb==0.18.7
transformers==4.45.1

# Serving / demo / misc utilities
anyio==4.3.0
audioread==3.0.1
einops==0.6.1
einops-exts==0.0.4
ffmpy==0.3.2
fvcore
gradio==4.43.0
gradio_client==1.3.0
ninja==1.11.1.1
notebook==6.4.10
open_clip_torch==2.26.1
protobuf==4.24.4
pyzmq==25.1.1
safetensors==0.4.3
scikit-learn==1.2.2
scipy==1.11.1
sentencepiece==0.1.99
timm==0.9.11
toml==0.10.2
tqdm==4.66.1
loguru==0.7.3

# Prebuilt flash-attention wheel.
# NOTE(review): platform-specific (CUDA 12.2, torch 2.1, CPython 3.10,
# linux x86_64) — installation fails on any other platform; confirm the
# target environment matches before pinning this URL.
https://github.com/Dao-AILab/flash-attention/releases/download/v2.4.2/flash_attn-2.4.2+cu122torch2.1cxx11abiFALSE-cp310-cp310-linux_x86_64.whl

# Evaluation / testing
tenacity
sqlitedict
evaluate==0.4.3
sacrebleu==2.5.1
pytablewriter==1.2.1
pytest==8.3.5