Bomme committed on
Commit
7eb54a6
·
1 Parent(s): d65f20e

add @spaces.GPU

Browse files
Files changed (1) hide show
  1. app.py +3 -0
app.py CHANGED
@@ -5,6 +5,7 @@ from pathlib import Path
5
  from typing import Literal
6
 
7
  import gradio as gr
 
8
  import torch
9
 
10
  from NatureLM.models.NatureLM import NatureLM
@@ -27,6 +28,7 @@ class DummyConfig:
27
  }
28
 
29
 
 
30
  def prompt_lm(audios: list[str], messages: list[dict[str, str]]):
31
  cuda_enabled = torch.cuda.is_available()
32
  samples = prepare_sample_waveforms(audios, cuda_enabled)
@@ -213,6 +215,7 @@ def to_raven_format(outputs: dict[int, str], chunk_len: int = 10) -> str:
213
  return "\n".join(raven_output)
214
 
215
 
 
216
  def _run_long_recording_inference(file, task, chunk_len: int = 10, hop_len: int = 5, progress=gr.Progress()):
217
  cuda_enabled = torch.cuda.is_available()
218
  outputs = {}
 
5
  from typing import Literal
6
 
7
  import gradio as gr
8
+ import spaces
9
  import torch
10
 
11
  from NatureLM.models.NatureLM import NatureLM
 
28
  }
29
 
30
 
31
+ @spaces.GPU
32
  def prompt_lm(audios: list[str], messages: list[dict[str, str]]):
33
  cuda_enabled = torch.cuda.is_available()
34
  samples = prepare_sample_waveforms(audios, cuda_enabled)
 
215
  return "\n".join(raven_output)
216
 
217
 
218
+ @spaces.GPU
219
  def _run_long_recording_inference(file, task, chunk_len: int = 10, hop_len: int = 5, progress=gr.Progress()):
220
  cuda_enabled = torch.cuda.is_available()
221
  outputs = {}