# requirements.txt — pinned Python dependencies for this project.
torch==2.1.0
torchdiffeq==0.2.5
torchvision==0.16.0
numpy==1.26.3
diffusers==0.32.1
accelerate==1.2.1
transformers==4.47.1
huggingface-hub==0.25.0
tensorboard
gradio
click
opencv-python
scikit-image
numba==0.60.0
scipy
tqdm
einops
sentencepiece
hf_xet
# Prebuilt flash-attn wheel: requires CPython 3.10, torch 2.1, CUDA 12, linux_x86_64.
https://github.com/Dao-AILab/flash-attention/releases/download/v2.7.2.post1/flash_attn-2.7.2.post1+cu12torch2.1cxx11abiFALSE-cp310-cp310-linux_x86_64.whl