Michael Natanael committed on
Commit
557a97e
·
1 Parent(s): 57238f2

change batch_size whisper

Browse files
Files changed (1) hide show
  1. app.py +1 -2
app.py CHANGED
@@ -69,12 +69,11 @@ pipe = pipeline(
69
  tokenizer=processor.tokenizer,
70
  feature_extractor=processor.feature_extractor,
71
  chunk_length_s=30,
72
- batch_size=1, # batch size for inference - set based on your device
73
  torch_dtype=torch_dtype,
74
  device=device,
75
  max_new_tokens=128, # Limit text generation
76
  return_timestamps=False, # Save memory
77
- device_map="auto", # Better resource handling
78
  )
79
 
80
 
 
69
  tokenizer=processor.tokenizer,
70
  feature_extractor=processor.feature_extractor,
71
  chunk_length_s=30,
72
+ batch_size=32, # batch size for inference - set based on your device
73
  torch_dtype=torch_dtype,
74
  device=device,
75
  max_new_tokens=128, # Limit text generation
76
  return_timestamps=False, # Save memory
 
77
  )
78
 
79