fix: del flash attn
- .gitattributes +1 -0
- requirements.txt +1 -1
.gitattributes
CHANGED
@@ -33,3 +33,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
 *.zip filter=lfs diff=lfs merge=lfs -text
 *.zst filter=lfs diff=lfs merge=lfs -text
 *tfevents* filter=lfs diff=lfs merge=lfs -text
+*.wav filter=lfs diff=lfs merge=lfs -text
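The added line is exactly what `git lfs track "*.wav"` writes into .gitattributes, so WAV audio files are stored through Git LFS instead of being committed directly.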
requirements.txt
CHANGED
@@ -14,4 +14,4 @@ openai
 PyYAML
 einops
 huggingface_hub
-https://github.com/Dao-AILab/flash-attention/releases/download/v2.8.0.post2/flash_attn-2.8.0.post2+cu12torch2.5cxx11abiTRUE-cp310-cp310-linux_x86_64.whl
+# https://github.com/Dao-AILab/flash-attention/releases/download/v2.8.0.post2/flash_attn-2.8.0.post2+cu12torch2.5cxx11abiTRUE-cp310-cp310-linux_x86_64.whl
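The prebuilt flash_attn wheel (CUDA 12, torch 2.5, Python 3.10, Linux x86_64) is commented out rather than deleted, which makes flash-attn an optional dependency. A minimal sketch of the usual runtime fallback, assuming a transformers-based model; the checkpoint name is a placeholder, not taken from this repo:

# Probe for flash_attn at runtime and fall back to PyTorch SDPA when it is absent.
import importlib.util

from transformers import AutoModelForCausalLM

if importlib.util.find_spec("flash_attn") is not None:
    attn_implementation = "flash_attention_2"  # optional flash-attn kernels
else:
    attn_implementation = "sdpa"  # PyTorch scaled_dot_product_attention fallback

model = AutoModelForCausalLM.from_pretrained(
    "your-org/your-model",  # placeholder checkpoint, not from this repo
    attn_implementation=attn_implementation,
)

Loading this way keeps the app usable on hosts where the pinned wheel cannot install, at the cost of slower attention when flash-attn is missing.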