Michael Natanael committed on
Commit
f3a0fa2
·
1 Parent(s): 4cbd54d

change max_new_tokens whisper

Browse files
Files changed (1) hide show
  1. app.py +1 -1
app.py CHANGED
@@ -72,7 +72,7 @@ pipe = pipeline(
72
  batch_size=64, # batch size for inference - set based on your device
73
  torch_dtype=torch_dtype,
74
  device=device,
75
- max_new_tokens=512, # Limit text generation
76
  return_timestamps=False, # Save memory
77
  )
78
 
 
72
  batch_size=64, # batch size for inference - set based on your device
73
  torch_dtype=torch_dtype,
74
  device=device,
75
+ max_new_tokens=444, # 448 max - 4 prompt tokens = 444
76
  return_timestamps=False, # Save memory
77
  )
78