Spaces:
Running
Running
Michael Natanael
committed on
Commit
·
b3eefeb
1
Parent(s):
f3a0fa2
change batch_size & max_new_tokens whisper
Browse files
app.py
CHANGED
@@ -69,10 +69,10 @@ pipe = pipeline(
|
|
69 |
tokenizer=processor.tokenizer,
|
70 |
feature_extractor=processor.feature_extractor,
|
71 |
chunk_length_s=30,
|
72 |
-
batch_size=
|
73 |
torch_dtype=torch_dtype,
|
74 |
device=device,
|
75 |
-
max_new_tokens=
|
76 |
return_timestamps=False, # Save memory
|
77 |
)
|
78 |
|
|
|
69 |
tokenizer=processor.tokenizer,
|
70 |
feature_extractor=processor.feature_extractor,
|
71 |
chunk_length_s=30,
|
72 |
+
batch_size=128, # batch size for inference - set based on your device
|
73 |
torch_dtype=torch_dtype,
|
74 |
device=device,
|
75 |
+
max_new_tokens=128, # Limit text generation
|
76 |
return_timestamps=False, # Save memory
|
77 |
)
|
78 |
|