CONFIG = dict(
    model_name="microsoft/deberta-v3-large",
    num_classes=6,
    lr=2e-5,

    batch_size=8,
    num_workers=8,
    max_length=512,
    weight_decay=0.01,

    accelerator='gpu',
    max_epochs=5,
    accumulate_grad_batches=4,
    precision=16,
    gradient_clip_val=1000,
    train_size=0.8,
    num_cross_val_splits=5,
    num_frozen_layers=20,  # out of the 24 encoder layers in deberta-v3-large
)
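
# Hypothetical usage sketch (not part of the original file): one way CONFIG could
# feed a transformers backbone and a pytorch_lightning Trainer. The function and
# variable names below are illustrative assumptions, not the project's actual code.
def build_components(cfg=CONFIG):
    # imports kept local so importing this config module stays lightweight
    import pytorch_lightning as pl
    from transformers import AutoModel, AutoTokenizer

    tokenizer = AutoTokenizer.from_pretrained(cfg["model_name"])
    backbone = AutoModel.from_pretrained(cfg["model_name"])

    # freeze the first `num_frozen_layers` of DeBERTa's 24 encoder blocks
    for block in backbone.encoder.layer[: cfg["num_frozen_layers"]]:
        for param in block.parameters():
            param.requires_grad = False

    trainer = pl.Trainer(
        accelerator=cfg["accelerator"],
        max_epochs=cfg["max_epochs"],
        accumulate_grad_batches=cfg["accumulate_grad_batches"],
        precision=cfg["precision"],
        gradient_clip_val=cfg["gradient_clip_val"],
    )
    return tokenizer, backbone, trainer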

# we could also try nn.SmoothL1Loss for the regression loss
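
# A minimal sketch of the SmoothL1 swap mentioned above, assuming the head regresses
# all `num_classes` scores jointly and the current criterion is MSE-style; the
# `use_smooth_l1` flag is an illustrative assumption, not an existing option.
def build_criterion(use_smooth_l1: bool = False):
    from torch import nn  # local import, same reasoning as above

    # SmoothL1 behaves like MSE near zero but penalises large residuals linearly,
    # so it is less sensitive to outlier targets
    return nn.SmoothL1Loss() if use_smooth_l1 else nn.MSELoss()

# usage: criterion = build_criterion(use_smooth_l1=True)
#        loss = criterion(preds, targets)  # both shaped (batch_size, num_classes)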