torch==2.5.1
torchaudio>=2.0.0
transformers>=4.30.0
gradio>=4.0.0
numpy>=1.21.0
accelerate>=0.20.0
PyPDF2
beautifulsoup4
soundfile
librosa
tqdm
requests
openai
PyYAML
einops
huggingface_hub
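# Optional: FlashAttention 2 prebuilt wheel (uncomment the URL below to install).
# Per its filename, the wheel targets CUDA 12, torch 2.5, and CPython 3.10 on
# linux_x86_64; only use it if your environment matches, otherwise install
# flash-attn from source (pip install flash-attn --no-build-isolation) or skip it.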
# https://github.com/Dao-AILab/flash-attention/releases/download/v2.8.0.post2/flash_attn-2.8.0.post2+cu12torch2.5cxx11abiTRUE-cp310-cp310-linux_x86_64.whl |