BK-Lee committed on
Commit
ebe1140
·
1 Parent(s): a770e0a
Files changed (1) hide show
  1. app.py +0 -2
app.py CHANGED
@@ -18,8 +18,6 @@ from torchvision.transforms.functional import pil_to_tensor
18
  # flash attention
19
  import subprocess
20
  subprocess.run('pip install flash-attn --no-build-isolation', env={'FLASH_ATTENTION_SKIP_CUDA_BUILD': "TRUE"}, shell=True)
21
- subprocess.run('pip install causal-conv1d>=1.2.0 --no-build-isolation', shell=True)
22
- subprocess.run('pip install mamba-ssm --no-build-isolation', shell=True)
23
 
24
  # accel
25
  accel = Accelerator()
 
18
  # flash attention
19
  import subprocess
20
  subprocess.run('pip install flash-attn --no-build-isolation', env={'FLASH_ATTENTION_SKIP_CUDA_BUILD': "TRUE"}, shell=True)
 
 
21
 
22
  # accel
23
  accel = Accelerator()