mtasic85 committed on
Commit abd95b0 · 1 Parent(s): 5b960ac

max_seq_length: 8192

Files changed (1):
  1. scripts/pretrain-core-model.yaml +1 -1
scripts/pretrain-core-model.yaml CHANGED
@@ -83,7 +83,7 @@ train:
   max_steps:

   # Limits the length of samples. Off by default (type: Optional[int], default: null)
-  max_seq_length:
+  max_seq_length: 8192

   # Whether to tie the embedding weights with the language modeling head weights. (type: Optional[bool], default: False)
   tie_embeddings: true
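
For context, this commit sets train.max_seq_length from null (off, i.e. no limit) to 8192, so pretraining samples are capped at 8192 tokens as described by the comment in the config. The snippet below is a small illustrative check, not part of this repository: it loads the edited YAML with PyYAML (an assumed dependency) and reads the new value, assuming it is run from the repository root.

# Illustrative sketch (not part of this commit): read the updated config and
# confirm the new sequence-length cap. Assumes PyYAML is installed and the
# script is run from the repository root.
import yaml

with open("scripts/pretrain-core-model.yaml") as f:
    config = yaml.safe_load(f)

# After this commit, train.max_seq_length is 8192 rather than null (off),
# i.e. samples are limited to at most 8192 tokens during pretraining.
max_seq_length = config["train"]["max_seq_length"]
print("max_seq_length:", max_seq_length)  # expected: 8192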