mtasic85 committed on
Commit b7123ab · 1 Parent(s): 4104485

pretrain core 4

Files changed (1)
  1. scripts/pretrain_core_model_4.yaml +2 -1
scripts/pretrain_core_model_4.yaml CHANGED
@@ -78,7 +78,8 @@ train:
   max_steps:

   # Limits the length of samples. Off by default (type: Optional[int], default: null)
-  max_seq_length: 16384
+  # max_seq_length: 16384
+  max_seq_length:

   # Whether to tie the embedding weights with the language modeling head weights. (type: Optional[bool], default: False)
   tie_embeddings: true
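Effect of the change: commenting out the old value and leaving `max_seq_length:` blank makes the key parse as null, which restores the documented default of not limiting sample length. A minimal sketch of that parsing behavior, assuming the file is read as plain YAML with PyYAML (the loader below is illustrative only, not the project's actual training entry point):

```python
import yaml  # PyYAML

# A blank scalar in YAML parses as None, matching the documented
# default of `null` for max_seq_length (i.e. no sample-length limit).
snippet = """
train:
  max_steps:
  # max_seq_length: 16384
  max_seq_length:
  tie_embeddings: true
"""

cfg = yaml.safe_load(snippet)
assert cfg["train"]["max_seq_length"] is None   # limit disabled
assert cfg["train"]["tie_embeddings"] is True
print(cfg["train"])
# {'max_steps': None, 'max_seq_length': None, 'tie_embeddings': True}
```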