max_seq_length: 8192
Changed file: scripts/pretrain-core-model.yaml

@@ -83,7 +83,7 @@ train:
 max_steps:

 # Limits the length of samples. Off by default (type: Optional[int], default: null)
-max_seq_length:
+max_seq_length: 8192

 # Whether to tie the embedding weights with the language modeling head weights. (type: Optional[bool], default: False)
 tie_embeddings: true