BK-Lee committed
Commit a770e0a · 1 Parent(s): 2b9d94f
Files changed (1)
  1. app.py +2 -0
app.py CHANGED
@@ -18,6 +18,8 @@ from torchvision.transforms.functional import pil_to_tensor
 # flash attention
 import subprocess
 subprocess.run('pip install flash-attn --no-build-isolation', env={'FLASH_ATTENTION_SKIP_CUDA_BUILD': "TRUE"}, shell=True)
+subprocess.run('pip install causal-conv1d>=1.2.0 --no-build-isolation', shell=True)
+subprocess.run('pip install mamba-ssm --no-build-isolation', shell=True)
 
 # accel
 accel = Accelerator()
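
The two added lines extend an existing pattern in this Space: heavy dependencies are installed at startup via subprocess rather than through requirements.txt, so flags like --no-build-isolation and build-time environment variables can be passed. causal-conv1d and mamba-ssm are the usual prerequisites for Mamba-based layers, which is presumably why they are added here. Below is a minimal sketch of the same startup-install pattern with basic error handling; the runtime_install helper and the check=True behavior are illustrative assumptions, not part of app.py.

import os
import subprocess

def runtime_install(spec, extra_env=None):
    # Hypothetical helper mirroring the startup-install pattern in app.py.
    # Merging os.environ keeps PATH and friends available to the shell command.
    env = {**os.environ, **(extra_env or {})}
    subprocess.run(
        f"pip install {spec} --no-build-isolation",
        env=env,
        shell=True,
        check=True,  # assumption: raise if the install fails instead of continuing silently
    )

# Same packages as the commit. FLASH_ATTENTION_SKIP_CUDA_BUILD tells
# flash-attn's setup to skip compiling its CUDA extension at install time.
runtime_install("flash-attn", {"FLASH_ATTENTION_SKIP_CUDA_BUILD": "TRUE"})
runtime_install("'causal-conv1d>=1.2.0'")  # quoted so the shell does not treat >= as redirection
runtime_install("mamba-ssm")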