Michael Natanael committed
Commit b3eefeb · 1 Parent(s): f3a0fa2

change batch_size & max_new_tokens whisper

Files changed (1): app.py (+2, -2)
app.py CHANGED
@@ -69,10 +69,10 @@ pipe = pipeline(
     tokenizer=processor.tokenizer,
     feature_extractor=processor.feature_extractor,
     chunk_length_s=30,
-    batch_size=64,  # batch size for inference - set based on your device
+    batch_size=128,  # batch size for inference - set based on your device
     torch_dtype=torch_dtype,
     device=device,
-    max_new_tokens=444,  # 448 max - 4 prompt tokens = 444
+    max_new_tokens=128,  # Limit text generation
     return_timestamps=False,  # Save memory
 )
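For context, the changed keyword arguments sit inside the pipe = pipeline(...) call in app.py. Below is a minimal sketch of how such a Whisper pipeline is typically assembled with transformers; the checkpoint name and the variable definitions above the hunk are assumptions for illustration, not taken from this repo.

import torch
from transformers import AutoModelForSpeechSeq2Seq, AutoProcessor, pipeline

device = "cuda:0" if torch.cuda.is_available() else "cpu"
torch_dtype = torch.float16 if torch.cuda.is_available() else torch.float32

model_id = "openai/whisper-large-v3"  # hypothetical checkpoint; the repo may use a different one

model = AutoModelForSpeechSeq2Seq.from_pretrained(
    model_id, torch_dtype=torch_dtype, low_cpu_mem_usage=True
)
model.to(device)
processor = AutoProcessor.from_pretrained(model_id)

pipe = pipeline(
    "automatic-speech-recognition",
    model=model,
    tokenizer=processor.tokenizer,
    feature_extractor=processor.feature_extractor,
    chunk_length_s=30,            # split long audio into 30 s chunks
    batch_size=128,               # batch size for inference - set based on your device
    torch_dtype=torch_dtype,
    device=device,
    max_new_tokens=128,           # Limit text generation
    return_timestamps=False,      # Save memory
)

result = pipe("sample.mp3")  # hypothetical input file
print(result["text"])

The trade-off in this commit: batch_size=128 processes more 30-second chunks per forward pass (more memory, higher throughput), while max_new_tokens=128 caps generation per chunk well below the 444-token ceiling that the removed comment derives from Whisper's 448-token decoder limit minus 4 prompt tokens, so speech-dense chunks may be truncated.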