Staticaliza committed on
Commit
3ef3d4d
·
verified ·
1 Parent(s): df76463

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +15 -8
app.py CHANGED
@@ -6,16 +6,14 @@ import torch
6
  from transformers import pipeline
7
 
8
  # Pre-Initialize
9
- DEVICE = "auto"
10
- if DEVICE == "auto":
11
- DEVICE = "cuda" if torch.cuda.is_available() else "cpu"
12
- print(f"[SYSTEM] | Using {DEVICE} type compute device.")
13
 
14
  # Variables
15
  DEFAULT_TASK = "transcribe"
16
  BATCH_SIZE = 8
17
 
18
- repo = pipeline(task="automatic-speech-recognition", model="openai/whisper-large-v3-turbo", chunk_length_s=30, device=DEVICE)
19
 
20
  css = '''
21
  .gradio-container{max-width: 560px !important}
@@ -26,18 +24,27 @@ footer {
26
  '''
27
 
28
  # Functions
29
- @spaces.GPU(duration=60)
30
  def transcribe(input=None, task=DEFAULT_TASK):
31
  print(input)
32
- if input is None: raise gr.Error("Invalid input.")
 
33
 
34
- output = repo(input, batch_size=BATCH_SIZE, generate_kwargs={"task": task}, return_timestamps=True)["text"]
 
 
 
 
 
35
 
36
  return output
37
 
38
  def cloud():
39
  print("[CLOUD] | Space maintained.")
40
 
 
 
 
 
41
  # Initialize
42
  with gr.Blocks(css=css) as main:
43
  with gr.Column():
 
6
  from transformers import pipeline
7
 
8
  # Pre-Initialize
9
+ DEVICE = -1 # -1 indicates CPU for transformers pipeline
10
+ print("[SYSTEM] | Using CPU type compute device.")
 
 
11
 
12
  # Variables
13
  DEFAULT_TASK = "transcribe"
14
  BATCH_SIZE = 8
15
 
16
+ repo = pipeline(task="automatic-speech-recognition", model="openai/whisper-large-v3-turbo", chunk_length_s=30, device=DEVICE)  # Ensures CPU usage
17
 
18
  css = '''
19
  .gradio-container{max-width: 560px !important}
 
24
  '''
25
 
26
  # Functions
 
27
  def transcribe(input=None, task=DEFAULT_TASK):
28
  print(input)
29
+ if input is None:
30
+ raise gr.Error("Invalid input.")
31
 
32
+ output = repo(
33
+ input,
34
+ batch_size=BATCH_SIZE,
35
+ generate_kwargs={"task": task},
36
+ return_timestamps=True
37
+ )["text"]
38
 
39
  return output
40
 
41
  def cloud():
42
  print("[CLOUD] | Space maintained.")
43
 
44
+ @spaces.GPU(duration=60)
45
+ def gpu():
46
+ return
47
+
48
  # Initialize
49
  with gr.Blocks(css=css) as main:
50
  with gr.Column():