{
"k_fold": 5,
"dropout_probability": 0.3,
"hidden_size": 768,
"learning_rate": 2e-05,
"batch_size": 16,
"num_epochs": 5,
"gradient_clipping": 1.0,
"type_learning_rate_scheduler": "linear_schedule_with_warmup",
"num_warmup_steps": 0,
"loss_function": "CrossEntropyLoss"
}
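
A minimal sketch of how these hyperparameters might be wired into a PyTorch/Transformers fine-tuning setup. The model head, the number of output classes, the steps-per-epoch value, and the config path "config.json" are placeholders assumed for illustration, not part of the original repo.

import json

import torch
from torch import nn
from transformers import get_linear_schedule_with_warmup

# Load the hyperparameter file (path assumed for this sketch).
with open("config.json") as f:
    cfg = json.load(f)

# Placeholder classifier head matching hidden_size (768 suggests a
# BERT-base-style encoder); a real setup would fine-tune the full model.
model = nn.Sequential(
    nn.Dropout(cfg["dropout_probability"]),
    nn.Linear(cfg["hidden_size"], 2),  # 2 output classes assumed
)

optimizer = torch.optim.AdamW(model.parameters(), lr=cfg["learning_rate"])
criterion = nn.CrossEntropyLoss()  # matches cfg["loss_function"]

# Linear schedule with warmup, as named by "type_learning_rate_scheduler".
# steps_per_epoch depends on the dataset and batch_size; 100 is assumed here.
steps_per_epoch = 100
num_training_steps = steps_per_epoch * cfg["num_epochs"]
scheduler = get_linear_schedule_with_warmup(
    optimizer,
    num_warmup_steps=cfg["num_warmup_steps"],
    num_training_steps=num_training_steps,
)

# Inside the training loop, gradient clipping would use cfg["gradient_clipping"]:
#   torch.nn.utils.clip_grad_norm_(model.parameters(), cfg["gradient_clipping"])
# and k_fold (5) would drive a cross-validation split over the training data.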