llm-config-optimizer / config.json
{
  "config": {
    "batch_size": 32,
    "device_map": "auto",
    "early_stopping_patience": 3,
    "epochs": 10,
    "ewc_lambda": 100.0,
    "gradient_checkpointing": false,
    "learning_rate": 0.001,
    "max_examples_per_class": 1000,
    "max_length": 512,
    "min_confidence": 0.1,
    "min_examples_per_class": 3,
    "neural_weight": 0.3,
    "num_representative_examples": 5,
    "prototype_update_frequency": 100,
    "prototype_weight": 0.7,
    "quantization": null,
    "similarity_threshold": 0.6,
    "warmup_steps": 0
  },
  "embedding_dim": 768,
  "id_to_label": {
    "0": "FOCUSED",
    "1": "BALANCED",
    "2": "CREATIVE",
    "3": "DETERMINISTIC",
    "4": "EXPERIMENTAL"
  },
  "label_to_id": {
    "BALANCED": 1,
    "CREATIVE": 2,
    "DETERMINISTIC": 3,
    "EXPERIMENTAL": 4,
    "FOCUSED": 0
  },
  "model_name": "distilbert-base-uncased",
  "train_steps": 125
}
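
A minimal sketch of how this file might be consumed, assuming it is read with Python's standard json module; the variable names below are illustrative, not part of the repository. Note that JSON object keys are always strings, so the ids in id_to_label need casting back to int.

import json

# Load the classifier configuration exported alongside the model.
with open("config.json") as f:
    cfg = json.load(f)

training = cfg["config"]              # training hyperparameters
model_name = cfg["model_name"]        # "distilbert-base-uncased"
embedding_dim = cfg["embedding_dim"]  # 768, the DistilBERT hidden size

# JSON keys are strings, so recover integer class ids explicitly.
id_to_label = {int(i): label for i, label in cfg["id_to_label"].items()}
label_to_id = cfg["label_to_id"]

# Sanity check: the two mappings should be exact inverses.
assert all(label_to_id[label] == i for i, label in id_to_label.items())

print(id_to_label[0])                # FOCUSED
print(training["prototype_weight"],  # 0.7
      training["neural_weight"])     # 0.3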
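
The prototype_weight / neural_weight pair (0.7 / 0.3) suggests the final per-class score blends a prototype-similarity score with a neural head's probability. That is an assumption about the model's scoring rule, not something the file confirms; a sketch of one plausible convex combination:

import numpy as np

def combined_score(proto_sim: np.ndarray, neural_prob: np.ndarray,
                   prototype_weight: float = 0.7,
                   neural_weight: float = 0.3) -> np.ndarray:
    # Hypothetical blend: convex combination of per-class prototype
    # similarity and neural-classifier probability. The real model's
    # scoring rule may differ.
    return prototype_weight * proto_sim + neural_weight * neural_prob

# Example with five classes, matching id_to_label above.
proto_sim = np.array([0.9, 0.2, 0.1, 0.4, 0.3])
neural_prob = np.array([0.6, 0.1, 0.1, 0.1, 0.1])
print(combined_score(proto_sim, neural_prob).argmax())  # 0 -> FOCUSED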