yhzx233 committed
Commit 68e6d47 · Parent: aede480

fix: flash attn wheel

Files changed (2)
  1. app.py +0 -4
  2. requirements.txt +2 -1
app.py CHANGED
@@ -1,7 +1,3 @@
-import sys
-import subprocess
-subprocess.check_call([sys.executable, "-m", "pip", "install", "flash-attn"])
-
 import gradio as gr
 import torch
 import torchaudio
requirements.txt CHANGED
@@ -13,4 +13,5 @@ requests
 openai
 PyYAML
 einops
-huggingface_hub
+huggingface_hub
+https://github.com/Dao-AILab/flash-attention/releases/download/v2.8.0.post2/flash_attn-2.8.0.post2+cu12torch2.7cxx11abiFALSE-cp310-cp310-linux_x86_64.whl
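The pinned wheel installs without a from-source build only when the environment matches the tags in its filename: CPython 3.10 (cp310), PyTorch 2.7, CUDA 12 (cu12), non-CXX11 ABI, Linux x86_64. A minimal sanity-check sketch, assuming exactly those tags:

import sys
import torch

# Constraints read off the wheel filename:
# flash_attn-2.8.0.post2+cu12torch2.7cxx11abiFALSE-cp310-cp310-linux_x86_64.whl
assert sys.version_info[:2] == (3, 10), "wheel targets CPython 3.10 (cp310)"
assert torch.__version__.startswith("2.7"), "wheel targets PyTorch 2.7"
assert torch.version.cuda and torch.version.cuda.startswith("12"), "wheel targets CUDA 12 (cu12)"

import flash_attn  # should import directly from the prebuilt wheel
print(flash_attn.__version__)  # expected: 2.8.0.post2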