Michael Natanael committed on
Commit 4cbd54d · 1 Parent(s): 557a97e

change batch_size & max_new_tokens whisper

Files changed (1): app.py +2 -2
app.py CHANGED
@@ -69,10 +69,10 @@ pipe = pipeline(
     tokenizer=processor.tokenizer,
     feature_extractor=processor.feature_extractor,
     chunk_length_s=30,
-    batch_size=32,  # batch size for inference - set based on your device
+    batch_size=64,  # batch size for inference - set based on your device
     torch_dtype=torch_dtype,
     device=device,
-    max_new_tokens=128,  # Limit text generation
+    max_new_tokens=512,  # Limit text generation
     return_timestamps=False,  # Save memory
 )
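For context, a minimal sketch of the `pipe = pipeline(...)` setup this hunk modifies, using the transformers ASR pipeline. Everything outside the hunk is an assumption, not taken from app.py: the checkpoint (`openai/whisper-large-v3` here is hypothetical) and how `device` and `torch_dtype` are derived.

```python
import torch
from transformers import AutoModelForSpeechSeq2Seq, AutoProcessor, pipeline

# Hypothetical checkpoint -- the actual model used in app.py is not shown in this diff.
model_id = "openai/whisper-large-v3"

# Assumed device/dtype selection; app.py defines these somewhere above the hunk.
device = "cuda:0" if torch.cuda.is_available() else "cpu"
torch_dtype = torch.float16 if torch.cuda.is_available() else torch.float32

model = AutoModelForSpeechSeq2Seq.from_pretrained(
    model_id, torch_dtype=torch_dtype, low_cpu_mem_usage=True
)
model.to(device)
processor = AutoProcessor.from_pretrained(model_id)

pipe = pipeline(
    "automatic-speech-recognition",
    model=model,
    tokenizer=processor.tokenizer,
    feature_extractor=processor.feature_extractor,
    chunk_length_s=30,        # split long audio into 30 s chunks
    batch_size=64,            # chunks decoded per forward pass; tune to GPU memory
    torch_dtype=torch_dtype,
    device=device,
    max_new_tokens=512,       # cap on tokens generated per chunk
    return_timestamps=False,  # skip timestamp decoding to save memory
)

# Example usage:
# result = pipe("sample.wav")
# print(result["text"])
```

Two caveats on the new values: `batch_size=64` assumes a GPU with enough memory to hold 64 thirty-second chunks in flight, and Whisper's decoder has a fixed maximum target length (448 tokens for the released checkpoints), so `max_new_tokens=512` may be clipped or rejected by recent transformers releases.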