Michael Natanael committed on
Commit
7c09bf0
·
1 Parent(s): 3402997

change batch_size whisper

Browse files
Files changed (1) hide show
  1. app.py +1 -1
app.py CHANGED
@@ -69,7 +69,7 @@ pipe = pipeline(
69
  tokenizer=processor.tokenizer,
70
  feature_extractor=processor.feature_extractor,
71
  chunk_length_s=30,
72
- batch_size=256, # batch size for inference - set based on your device
73
  torch_dtype=torch_dtype,
74
  device=device,
75
  max_new_tokens=128, # Limit text generation
 
69
  tokenizer=processor.tokenizer,
70
  feature_extractor=processor.feature_extractor,
71
  chunk_length_s=30,
72
+ batch_size=128, # batch size for inference - set based on your device
73
  torch_dtype=torch_dtype,
74
  device=device,
75
  max_new_tokens=128, # Limit text generation