Farouk committed
Commit e96dcc6 · 1 Parent(s): 065799c
Training in progress, step 7600
Files changed:
- adapter_model.bin +1 -1
- checkpoint-5400/adapter_model/adapter_model/README.md +0 -44
- checkpoint-5400/adapter_model/adapter_model/adapter_config.json +0 -26
- checkpoint-5400/adapter_model/adapter_model/adapter_model.bin +0 -3
- checkpoint-6200/adapter_model/adapter_model/README.md +12 -0
- checkpoint-6200/adapter_model/adapter_model/adapter_model.bin +1 -1
- {checkpoint-5400 → checkpoint-7600}/README.md +0 -0
- {checkpoint-5400 → checkpoint-7600}/adapter_config.json +0 -0
- {checkpoint-5400 → checkpoint-7600}/adapter_model.bin +1 -1
- {checkpoint-5400 → checkpoint-7600}/added_tokens.json +0 -0
- {checkpoint-5400 → checkpoint-7600}/optimizer.pt +1 -1
- {checkpoint-5400 → checkpoint-7600}/rng_state.pth +1 -1
- {checkpoint-5400 → checkpoint-7600}/scheduler.pt +1 -1
- {checkpoint-5400 → checkpoint-7600}/special_tokens_map.json +0 -0
- {checkpoint-5400 → checkpoint-7600}/tokenizer.model +0 -0
- {checkpoint-5400 → checkpoint-7600}/tokenizer_config.json +0 -0
- {checkpoint-5400 → checkpoint-7600}/trainer_state.json +2106 -5
- {checkpoint-5400 → checkpoint-7600}/training_args.bin +0 -0
adapter_model.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:07063fab085ce2edb38e992b21ba4ee3b66bff6934db04d38b023343348d2b28
 size 319977229
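Every binary in this commit (adapter_model.bin, optimizer.pt, rng_state.pth, scheduler.pt) is stored through Git LFS, so each of the .bin/.pt diffs below changes only the `oid sha256:` line of a three-line pointer file, not the weights themselves. A minimal sketch of reading such a pointer back, assuming the standard LFS pointer layout shown above (`parse_lfs_pointer` is a hypothetical helper, not part of any library):

# Minimal sketch: parse a Git LFS pointer file like the one diffed above.
# parse_lfs_pointer is a hypothetical helper, not a library function.
from pathlib import Path

def parse_lfs_pointer(path: str) -> dict:
    # Pointer files are plain "key value" lines: version, oid, size.
    fields = dict(
        line.split(" ", 1)
        for line in Path(path).read_text().splitlines()
        if line
    )
    return {
        "version": fields["version"],              # LFS spec URL
        "sha256": fields["oid"].split(":", 1)[1],  # content hash
        "size_bytes": int(fields["size"]),         # payload size
    }

# Example: parse_lfs_pointer("adapter_model.bin")["size_bytes"] -> 319977229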
checkpoint-5400/adapter_model/adapter_model/README.md DELETED
@@ -1,44 +0,0 @@
----
-library_name: peft
----
-## Training procedure
-
-
-The following `bitsandbytes` quantization config was used during training:
-- load_in_8bit: False
-- load_in_4bit: True
-- llm_int8_threshold: 6.0
-- llm_int8_skip_modules: None
-- llm_int8_enable_fp32_cpu_offload: False
-- llm_int8_has_fp16_weight: False
-- bnb_4bit_quant_type: nf4
-- bnb_4bit_use_double_quant: True
-- bnb_4bit_compute_dtype: bfloat16
-
-The following `bitsandbytes` quantization config was used during training:
-- load_in_8bit: False
-- load_in_4bit: True
-- llm_int8_threshold: 6.0
-- llm_int8_skip_modules: None
-- llm_int8_enable_fp32_cpu_offload: False
-- llm_int8_has_fp16_weight: False
-- bnb_4bit_quant_type: nf4
-- bnb_4bit_use_double_quant: True
-- bnb_4bit_compute_dtype: bfloat16
-
-The following `bitsandbytes` quantization config was used during training:
-- load_in_8bit: False
-- load_in_4bit: True
-- llm_int8_threshold: 6.0
-- llm_int8_skip_modules: None
-- llm_int8_enable_fp32_cpu_offload: False
-- llm_int8_has_fp16_weight: False
-- bnb_4bit_quant_type: nf4
-- bnb_4bit_use_double_quant: True
-- bnb_4bit_compute_dtype: bfloat16
-### Framework versions
-
-- PEFT 0.4.0
-- PEFT 0.4.0
-
-- PEFT 0.4.0
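The deleted README records the same `bitsandbytes` quantization settings three times over (the block is re-appended on each save/resume). As a minimal sketch, assuming `transformers` and `bitsandbytes` are installed, the listed key/value pairs correspond to a BitsAndBytesConfig like this:

# Minimal sketch: the quantization settings from the deleted README,
# expressed as a transformers BitsAndBytesConfig (QLoRA-style NF4 setup).
import torch
from transformers import BitsAndBytesConfig

bnb_config = BitsAndBytesConfig(
    load_in_8bit=False,
    load_in_4bit=True,
    llm_int8_threshold=6.0,
    llm_int8_skip_modules=None,
    llm_int8_enable_fp32_cpu_offload=False,
    llm_int8_has_fp16_weight=False,
    bnb_4bit_quant_type="nf4",           # NormalFloat4 quantization
    bnb_4bit_use_double_quant=True,      # also quantize the quantization constants
    bnb_4bit_compute_dtype=torch.bfloat16,
)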
checkpoint-5400/adapter_model/adapter_model/adapter_config.json DELETED
@@ -1,26 +0,0 @@
-{
-  "auto_mapping": null,
-  "base_model_name_or_path": "pankajmathur/orca_mini_v3_7b",
-  "bias": "none",
-  "fan_in_fan_out": false,
-  "inference_mode": true,
-  "init_lora_weights": true,
-  "layers_pattern": null,
-  "layers_to_transform": null,
-  "lora_alpha": 16.0,
-  "lora_dropout": 0.1,
-  "modules_to_save": null,
-  "peft_type": "LORA",
-  "r": 64,
-  "revision": null,
-  "target_modules": [
-    "v_proj",
-    "down_proj",
-    "o_proj",
-    "k_proj",
-    "up_proj",
-    "gate_proj",
-    "q_proj"
-  ],
-  "task_type": "CAUSAL_LM"
-}
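The deleted adapter_config.json is a standard PEFT LoRA adapter config: rank-64 adapters with alpha 16 over every attention and MLP projection of the `pankajmathur/orca_mini_v3_7b` base model. A minimal sketch of the equivalent `peft.LoraConfig`, assuming PEFT 0.4.0 as recorded in the README above:

# Minimal sketch: the deleted adapter_config.json as a peft.LoraConfig.
from peft import LoraConfig

lora_config = LoraConfig(
    r=64,              # LoRA rank
    lora_alpha=16.0,   # scaling factor (effective scale = alpha / r)
    lora_dropout=0.1,
    bias="none",
    task_type="CAUSAL_LM",
    target_modules=[
        "v_proj", "down_proj", "o_proj", "k_proj",
        "up_proj", "gate_proj", "q_proj",
    ],
)
# base_model_name_or_path ("pankajmathur/orca_mini_v3_7b") is recorded
# automatically when the adapter is saved from a wrapped model.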
checkpoint-5400/adapter_model/adapter_model/adapter_model.bin DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:b6c8dcf19cc626d539e295ea9f44b542a78559328223631938076848ca5ad8b9
-size 319977229
checkpoint-6200/adapter_model/adapter_model/README.md CHANGED
@@ -59,6 +59,17 @@ The following `bitsandbytes` quantization config was used during training:
 - bnb_4bit_use_double_quant: True
 - bnb_4bit_compute_dtype: bfloat16
 
+The following `bitsandbytes` quantization config was used during training:
+- load_in_8bit: False
+- load_in_4bit: True
+- llm_int8_threshold: 6.0
+- llm_int8_skip_modules: None
+- llm_int8_enable_fp32_cpu_offload: False
+- llm_int8_has_fp16_weight: False
+- bnb_4bit_quant_type: nf4
+- bnb_4bit_use_double_quant: True
+- bnb_4bit_compute_dtype: bfloat16
+
 The following `bitsandbytes` quantization config was used during training:
 - load_in_8bit: False
 - load_in_4bit: True
@@ -76,5 +87,6 @@ The following `bitsandbytes` quantization config was used during training:
 - PEFT 0.4.0
 - PEFT 0.4.0
 - PEFT 0.4.0
+- PEFT 0.4.0
 
 - PEFT 0.4.0
checkpoint-6200/adapter_model/adapter_model/adapter_model.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:375bf20a484662f02aeae720e363a66caeffb14976d5d425cd1cbaa127b066b5
 size 319977229
{checkpoint-5400 → checkpoint-7600}/README.md RENAMED
File without changes
{checkpoint-5400 → checkpoint-7600}/adapter_config.json RENAMED
File without changes
{checkpoint-5400 → checkpoint-7600}/adapter_model.bin RENAMED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:07063fab085ce2edb38e992b21ba4ee3b66bff6934db04d38b023343348d2b28
 size 319977229
{checkpoint-5400 → checkpoint-7600}/added_tokens.json RENAMED
File without changes
{checkpoint-5400 → checkpoint-7600}/optimizer.pt RENAMED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:a60377cf0f531038c66e8c709a0bd3308aad9106ae9c96a1d17de9e0bd111152
 size 1279539973
{checkpoint-5400 → checkpoint-7600}/rng_state.pth RENAMED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:825acf34a2b84662a9307ebc8e48fe8e0bfc7b2fa63d20786eeb2d10884cd18e
 size 14511
{checkpoint-5400 → checkpoint-7600}/scheduler.pt RENAMED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:28e84ce1056e951be7d81d2edd8521bf4fd1356b40fedd4b87bf74e02969be5b
 size 627
{checkpoint-5400 → checkpoint-7600}/special_tokens_map.json RENAMED
File without changes
{checkpoint-5400 → checkpoint-7600}/tokenizer.model RENAMED
File without changes
{checkpoint-5400 → checkpoint-7600}/tokenizer_config.json RENAMED
File without changes
{checkpoint-5400 → checkpoint-7600}/trainer_state.json RENAMED
@@ -1,8 +1,8 @@
 {
-  "best_metric": 0.
-  "best_model_checkpoint": "experts/expert-16/checkpoint-
-  "epoch":
-  "global_step":
+  "best_metric": 0.7293602228164673,
+  "best_model_checkpoint": "experts/expert-16/checkpoint-6200",
+  "epoch": 2.4081115335868186,
+  "global_step": 7600,
   "is_hyper_param_search": false,
   "is_local_process_zero": true,
   "is_world_process_zero": true,
@@ -5163,11 +5163,2112 @@
       "mmlu_eval_accuracy_world_religions": 0.7368421052631579,
       "mmlu_loss": 1.348453690776937,
       "step": 5400
     }
   ],
   "max_steps": 10000,
   "num_train_epochs": 4,
-  "total_flos":
   "trial_name": null,
   "trial_params": null
 }
+    },
+    {
+      "epoch": 1.71,
+      "learning_rate": 0.0002,
+      "loss": 0.7047,
+      "step": 5410
+    },
+    {
+      "epoch": 1.72,
+      "learning_rate": 0.0002,
+      "loss": 0.7001,
+      "step": 5420
+    },
+    {
+      "epoch": 1.72,
+      "learning_rate": 0.0002,
+      "loss": 0.6759,
+      "step": 5430
+    },
+    {
+      "epoch": 1.72,
+      "learning_rate": 0.0002,
+      "loss": 0.707,
+      "step": 5440
+    },
+    {
+      "epoch": 1.73,
+      "learning_rate": 0.0002,
+      "loss": 0.6648,
+      "step": 5450
+    },
+    {
+      "epoch": 1.73,
+      "learning_rate": 0.0002,
+      "loss": 0.7223,
+      "step": 5460
+    },
+    {
+      "epoch": 1.73,
+      "learning_rate": 0.0002,
+      "loss": 0.722,
+      "step": 5470
+    },
+    {
+      "epoch": 1.74,
+      "learning_rate": 0.0002,
+      "loss": 0.7848,
+      "step": 5480
+    },
+    {
+      "epoch": 1.74,
+      "learning_rate": 0.0002,
+      "loss": 0.6956,
+      "step": 5490
+    },
+    {
+      "epoch": 1.74,
+      "learning_rate": 0.0002,
+      "loss": 0.6584,
+      "step": 5500
+    },
+    {
+      "epoch": 1.75,
+      "learning_rate": 0.0002,
+      "loss": 0.7522,
+      "step": 5510
+    },
+    {
+      "epoch": 1.75,
+      "learning_rate": 0.0002,
+      "loss": 0.7374,
+      "step": 5520
+    },
+    {
+      "epoch": 1.75,
+      "learning_rate": 0.0002,
+      "loss": 0.635,
+      "step": 5530
+    },
+    {
+      "epoch": 1.76,
+      "learning_rate": 0.0002,
+      "loss": 0.6947,
+      "step": 5540
+    },
+    {
+      "epoch": 1.76,
+      "learning_rate": 0.0002,
+      "loss": 0.6948,
+      "step": 5550
+    },
+    {
+      "epoch": 1.76,
+      "learning_rate": 0.0002,
+      "loss": 0.676,
+      "step": 5560
+    },
+    {
+      "epoch": 1.76,
+      "learning_rate": 0.0002,
+      "loss": 0.7053,
+      "step": 5570
+    },
+    {
+      "epoch": 1.77,
+      "learning_rate": 0.0002,
+      "loss": 0.6868,
+      "step": 5580
+    },
+    {
+      "epoch": 1.77,
+      "learning_rate": 0.0002,
+      "loss": 0.7307,
+      "step": 5590
+    },
+    {
+      "epoch": 1.77,
+      "learning_rate": 0.0002,
+      "loss": 0.6902,
+      "step": 5600
+    },
+    {
+      "epoch": 1.77,
+      "eval_loss": 0.7314637899398804,
+      "eval_runtime": 111.0487,
+      "eval_samples_per_second": 9.005,
+      "eval_steps_per_second": 4.503,
+      "step": 5600
+    },
+    {
+      "epoch": 1.77,
+      "mmlu_eval_accuracy": 0.48467107795368586,
+      "mmlu_eval_accuracy_abstract_algebra": 0.2727272727272727,
+      "mmlu_eval_accuracy_anatomy": 0.7142857142857143,
+      "mmlu_eval_accuracy_astronomy": 0.5,
+      "mmlu_eval_accuracy_business_ethics": 0.45454545454545453,
+      "mmlu_eval_accuracy_clinical_knowledge": 0.5172413793103449,
+      "mmlu_eval_accuracy_college_biology": 0.375,
+      "mmlu_eval_accuracy_college_chemistry": 0.375,
+      "mmlu_eval_accuracy_college_computer_science": 0.2727272727272727,
+      "mmlu_eval_accuracy_college_mathematics": 0.2727272727272727,
+      "mmlu_eval_accuracy_college_medicine": 0.36363636363636365,
+      "mmlu_eval_accuracy_college_physics": 0.36363636363636365,
+      "mmlu_eval_accuracy_computer_security": 0.5454545454545454,
+      "mmlu_eval_accuracy_conceptual_physics": 0.4230769230769231,
+      "mmlu_eval_accuracy_econometrics": 0.25,
+      "mmlu_eval_accuracy_electrical_engineering": 0.375,
+      "mmlu_eval_accuracy_elementary_mathematics": 0.3170731707317073,
+      "mmlu_eval_accuracy_formal_logic": 0.2857142857142857,
+      "mmlu_eval_accuracy_global_facts": 0.3,
+      "mmlu_eval_accuracy_high_school_biology": 0.375,
+      "mmlu_eval_accuracy_high_school_chemistry": 0.22727272727272727,
+      "mmlu_eval_accuracy_high_school_computer_science": 0.5555555555555556,
+      "mmlu_eval_accuracy_high_school_european_history": 0.6111111111111112,
+      "mmlu_eval_accuracy_high_school_geography": 0.8636363636363636,
+      "mmlu_eval_accuracy_high_school_government_and_politics": 0.6190476190476191,
+      "mmlu_eval_accuracy_high_school_macroeconomics": 0.4418604651162791,
+      "mmlu_eval_accuracy_high_school_mathematics": 0.13793103448275862,
+      "mmlu_eval_accuracy_high_school_microeconomics": 0.46153846153846156,
+      "mmlu_eval_accuracy_high_school_physics": 0.058823529411764705,
+      "mmlu_eval_accuracy_high_school_psychology": 0.85,
+      "mmlu_eval_accuracy_high_school_statistics": 0.2608695652173913,
+      "mmlu_eval_accuracy_high_school_us_history": 0.6363636363636364,
+      "mmlu_eval_accuracy_high_school_world_history": 0.7307692307692307,
+      "mmlu_eval_accuracy_human_aging": 0.782608695652174,
+      "mmlu_eval_accuracy_human_sexuality": 0.4166666666666667,
+      "mmlu_eval_accuracy_international_law": 0.8461538461538461,
+      "mmlu_eval_accuracy_jurisprudence": 0.36363636363636365,
+      "mmlu_eval_accuracy_logical_fallacies": 0.6666666666666666,
+      "mmlu_eval_accuracy_machine_learning": 0.18181818181818182,
+      "mmlu_eval_accuracy_management": 0.6363636363636364,
+      "mmlu_eval_accuracy_marketing": 0.76,
+      "mmlu_eval_accuracy_medical_genetics": 0.9090909090909091,
+      "mmlu_eval_accuracy_miscellaneous": 0.627906976744186,
+      "mmlu_eval_accuracy_moral_disputes": 0.5,
+      "mmlu_eval_accuracy_moral_scenarios": 0.25,
+      "mmlu_eval_accuracy_nutrition": 0.7272727272727273,
+      "mmlu_eval_accuracy_philosophy": 0.5,
+      "mmlu_eval_accuracy_prehistory": 0.5142857142857142,
+      "mmlu_eval_accuracy_professional_accounting": 0.1935483870967742,
+      "mmlu_eval_accuracy_professional_law": 0.3411764705882353,
+      "mmlu_eval_accuracy_professional_medicine": 0.5483870967741935,
+      "mmlu_eval_accuracy_professional_psychology": 0.4782608695652174,
+      "mmlu_eval_accuracy_public_relations": 0.5,
+      "mmlu_eval_accuracy_security_studies": 0.48148148148148145,
+      "mmlu_eval_accuracy_sociology": 0.6818181818181818,
+      "mmlu_eval_accuracy_us_foreign_policy": 0.7272727272727273,
+      "mmlu_eval_accuracy_virology": 0.5,
+      "mmlu_eval_accuracy_world_religions": 0.6842105263157895,
+      "mmlu_loss": 1.3874250733665636,
+      "step": 5600
+    },
+    {
+      "epoch": 1.78,
+      "learning_rate": 0.0002,
+      "loss": 0.6558,
+      "step": 5610
+    },
+    {
+      "epoch": 1.78,
+      "learning_rate": 0.0002,
+      "loss": 0.714,
+      "step": 5620
+    },
+    {
+      "epoch": 1.78,
+      "learning_rate": 0.0002,
+      "loss": 0.7019,
+      "step": 5630
+    },
+    {
+      "epoch": 1.79,
+      "learning_rate": 0.0002,
+      "loss": 0.7084,
+      "step": 5640
+    },
+    {
+      "epoch": 1.79,
+      "learning_rate": 0.0002,
+      "loss": 0.7184,
+      "step": 5650
+    },
+    {
+      "epoch": 1.79,
+      "learning_rate": 0.0002,
+      "loss": 0.6524,
+      "step": 5660
+    },
+    {
+      "epoch": 1.8,
+      "learning_rate": 0.0002,
+      "loss": 0.7265,
+      "step": 5670
+    },
+    {
+      "epoch": 1.8,
+      "learning_rate": 0.0002,
+      "loss": 0.7164,
+      "step": 5680
+    },
+    {
+      "epoch": 1.8,
+      "learning_rate": 0.0002,
+      "loss": 0.6825,
+      "step": 5690
+    },
+    {
+      "epoch": 1.81,
+      "learning_rate": 0.0002,
+      "loss": 0.7427,
+      "step": 5700
+    },
+    {
+      "epoch": 1.81,
+      "learning_rate": 0.0002,
+      "loss": 0.7416,
+      "step": 5710
+    },
+    {
+      "epoch": 1.81,
+      "learning_rate": 0.0002,
+      "loss": 0.7027,
+      "step": 5720
+    },
+    {
+      "epoch": 1.82,
+      "learning_rate": 0.0002,
+      "loss": 0.7039,
+      "step": 5730
+    },
+    {
+      "epoch": 1.82,
+      "learning_rate": 0.0002,
+      "loss": 0.7108,
+      "step": 5740
+    },
+    {
+      "epoch": 1.82,
+      "learning_rate": 0.0002,
+      "loss": 0.6257,
+      "step": 5750
+    },
+    {
+      "epoch": 1.83,
+      "learning_rate": 0.0002,
+      "loss": 0.6665,
+      "step": 5760
+    },
+    {
+      "epoch": 1.83,
+      "learning_rate": 0.0002,
+      "loss": 0.7371,
+      "step": 5770
+    },
+    {
+      "epoch": 1.83,
+      "learning_rate": 0.0002,
+      "loss": 0.7194,
+      "step": 5780
+    },
+    {
+      "epoch": 1.83,
+      "learning_rate": 0.0002,
+      "loss": 0.7164,
+      "step": 5790
+    },
+    {
+      "epoch": 1.84,
+      "learning_rate": 0.0002,
+      "loss": 0.6887,
+      "step": 5800
+    },
+    {
+      "epoch": 1.84,
+      "eval_loss": 0.732559084892273,
+      "eval_runtime": 111.5342,
+      "eval_samples_per_second": 8.966,
+      "eval_steps_per_second": 4.483,
+      "step": 5800
+    },
+    {
+      "epoch": 1.84,
+      "mmlu_eval_accuracy": 0.4740066355704332,
+      "mmlu_eval_accuracy_abstract_algebra": 0.2727272727272727,
+      "mmlu_eval_accuracy_anatomy": 0.6428571428571429,
+      "mmlu_eval_accuracy_astronomy": 0.4375,
+      "mmlu_eval_accuracy_business_ethics": 0.45454545454545453,
+      "mmlu_eval_accuracy_clinical_knowledge": 0.4827586206896552,
+      "mmlu_eval_accuracy_college_biology": 0.375,
+      "mmlu_eval_accuracy_college_chemistry": 0.375,
+      "mmlu_eval_accuracy_college_computer_science": 0.2727272727272727,
+      "mmlu_eval_accuracy_college_mathematics": 0.36363636363636365,
+      "mmlu_eval_accuracy_college_medicine": 0.3181818181818182,
+      "mmlu_eval_accuracy_college_physics": 0.2727272727272727,
+      "mmlu_eval_accuracy_computer_security": 0.45454545454545453,
+      "mmlu_eval_accuracy_conceptual_physics": 0.4230769230769231,
+      "mmlu_eval_accuracy_econometrics": 0.16666666666666666,
+      "mmlu_eval_accuracy_electrical_engineering": 0.25,
+      "mmlu_eval_accuracy_elementary_mathematics": 0.3170731707317073,
+      "mmlu_eval_accuracy_formal_logic": 0.21428571428571427,
+      "mmlu_eval_accuracy_global_facts": 0.4,
+      "mmlu_eval_accuracy_high_school_biology": 0.375,
+      "mmlu_eval_accuracy_high_school_chemistry": 0.22727272727272727,
+      "mmlu_eval_accuracy_high_school_computer_science": 0.5555555555555556,
+      "mmlu_eval_accuracy_high_school_european_history": 0.5555555555555556,
+      "mmlu_eval_accuracy_high_school_geography": 0.8181818181818182,
+      "mmlu_eval_accuracy_high_school_government_and_politics": 0.5714285714285714,
+      "mmlu_eval_accuracy_high_school_macroeconomics": 0.4186046511627907,
+      "mmlu_eval_accuracy_high_school_mathematics": 0.20689655172413793,
+      "mmlu_eval_accuracy_high_school_microeconomics": 0.4230769230769231,
+      "mmlu_eval_accuracy_high_school_physics": 0.11764705882352941,
+      "mmlu_eval_accuracy_high_school_psychology": 0.8333333333333334,
+      "mmlu_eval_accuracy_high_school_statistics": 0.30434782608695654,
+      "mmlu_eval_accuracy_high_school_us_history": 0.6363636363636364,
+      "mmlu_eval_accuracy_high_school_world_history": 0.6538461538461539,
+      "mmlu_eval_accuracy_human_aging": 0.6956521739130435,
+      "mmlu_eval_accuracy_human_sexuality": 0.3333333333333333,
+      "mmlu_eval_accuracy_international_law": 0.9230769230769231,
+      "mmlu_eval_accuracy_jurisprudence": 0.36363636363636365,
+      "mmlu_eval_accuracy_logical_fallacies": 0.6111111111111112,
+      "mmlu_eval_accuracy_machine_learning": 0.18181818181818182,
+      "mmlu_eval_accuracy_management": 0.6363636363636364,
+      "mmlu_eval_accuracy_marketing": 0.72,
+      "mmlu_eval_accuracy_medical_genetics": 0.9090909090909091,
+      "mmlu_eval_accuracy_miscellaneous": 0.6511627906976745,
+      "mmlu_eval_accuracy_moral_disputes": 0.42105263157894735,
+      "mmlu_eval_accuracy_moral_scenarios": 0.25,
+      "mmlu_eval_accuracy_nutrition": 0.6363636363636364,
+      "mmlu_eval_accuracy_philosophy": 0.5,
+      "mmlu_eval_accuracy_prehistory": 0.5142857142857142,
+      "mmlu_eval_accuracy_professional_accounting": 0.3870967741935484,
+      "mmlu_eval_accuracy_professional_law": 0.29411764705882354,
+      "mmlu_eval_accuracy_professional_medicine": 0.6451612903225806,
+      "mmlu_eval_accuracy_professional_psychology": 0.4927536231884058,
+      "mmlu_eval_accuracy_public_relations": 0.6666666666666666,
+      "mmlu_eval_accuracy_security_studies": 0.4444444444444444,
+      "mmlu_eval_accuracy_sociology": 0.6363636363636364,
+      "mmlu_eval_accuracy_us_foreign_policy": 0.7272727272727273,
+      "mmlu_eval_accuracy_virology": 0.5555555555555556,
+      "mmlu_eval_accuracy_world_religions": 0.631578947368421,
+      "mmlu_loss": 1.3845557023822173,
+      "step": 5800
+    },
+    {
+      "epoch": 1.84,
+      "learning_rate": 0.0002,
+      "loss": 0.668,
+      "step": 5810
+    },
+    {
+      "epoch": 1.84,
+      "learning_rate": 0.0002,
+      "loss": 0.6993,
+      "step": 5820
+    },
+    {
+      "epoch": 1.85,
+      "learning_rate": 0.0002,
+      "loss": 0.7418,
+      "step": 5830
+    },
+    {
+      "epoch": 1.85,
+      "learning_rate": 0.0002,
+      "loss": 0.6916,
+      "step": 5840
+    },
+    {
+      "epoch": 1.85,
+      "learning_rate": 0.0002,
+      "loss": 0.7564,
+      "step": 5850
+    },
+    {
+      "epoch": 1.86,
+      "learning_rate": 0.0002,
+      "loss": 0.641,
+      "step": 5860
+    },
+    {
+      "epoch": 1.86,
+      "learning_rate": 0.0002,
+      "loss": 0.7593,
+      "step": 5870
+    },
+    {
+      "epoch": 1.86,
+      "learning_rate": 0.0002,
+      "loss": 0.6886,
+      "step": 5880
+    },
+    {
+      "epoch": 1.87,
+      "learning_rate": 0.0002,
+      "loss": 0.7053,
+      "step": 5890
+    },
+    {
+      "epoch": 1.87,
+      "learning_rate": 0.0002,
+      "loss": 0.6201,
+      "step": 5900
+    },
+    {
+      "epoch": 1.87,
+      "learning_rate": 0.0002,
+      "loss": 0.6998,
+      "step": 5910
+    },
+    {
+      "epoch": 1.88,
+      "learning_rate": 0.0002,
+      "loss": 0.6768,
+      "step": 5920
+    },
+    {
+      "epoch": 1.88,
+      "learning_rate": 0.0002,
+      "loss": 0.711,
+      "step": 5930
+    },
+    {
+      "epoch": 1.88,
+      "learning_rate": 0.0002,
+      "loss": 0.681,
+      "step": 5940
+    },
+    {
+      "epoch": 1.89,
+      "learning_rate": 0.0002,
+      "loss": 0.7145,
+      "step": 5950
+    },
+    {
+      "epoch": 1.89,
+      "learning_rate": 0.0002,
+      "loss": 0.7513,
+      "step": 5960
+    },
+    {
+      "epoch": 1.89,
+      "learning_rate": 0.0002,
+      "loss": 0.6817,
+      "step": 5970
+    },
+    {
+      "epoch": 1.89,
+      "learning_rate": 0.0002,
+      "loss": 0.6757,
+      "step": 5980
+    },
+    {
+      "epoch": 1.9,
+      "learning_rate": 0.0002,
+      "loss": 0.6899,
+      "step": 5990
+    },
+    {
+      "epoch": 1.9,
+      "learning_rate": 0.0002,
+      "loss": 0.6821,
+      "step": 6000
+    },
+    {
+      "epoch": 1.9,
+      "eval_loss": 0.7302425503730774,
+      "eval_runtime": 111.0525,
+      "eval_samples_per_second": 9.005,
+      "eval_steps_per_second": 4.502,
+      "step": 6000
+    },
+    {
+      "epoch": 1.9,
+      "mmlu_eval_accuracy": 0.47023094937776666,
+      "mmlu_eval_accuracy_abstract_algebra": 0.36363636363636365,
+      "mmlu_eval_accuracy_anatomy": 0.6428571428571429,
+      "mmlu_eval_accuracy_astronomy": 0.4375,
+      "mmlu_eval_accuracy_business_ethics": 0.5454545454545454,
+      "mmlu_eval_accuracy_clinical_knowledge": 0.5172413793103449,
+      "mmlu_eval_accuracy_college_biology": 0.375,
+      "mmlu_eval_accuracy_college_chemistry": 0.125,
+      "mmlu_eval_accuracy_college_computer_science": 0.18181818181818182,
+      "mmlu_eval_accuracy_college_mathematics": 0.36363636363636365,
+      "mmlu_eval_accuracy_college_medicine": 0.36363636363636365,
+      "mmlu_eval_accuracy_college_physics": 0.36363636363636365,
+      "mmlu_eval_accuracy_computer_security": 0.36363636363636365,
+      "mmlu_eval_accuracy_conceptual_physics": 0.4230769230769231,
+      "mmlu_eval_accuracy_econometrics": 0.25,
+      "mmlu_eval_accuracy_electrical_engineering": 0.25,
+      "mmlu_eval_accuracy_elementary_mathematics": 0.2682926829268293,
+      "mmlu_eval_accuracy_formal_logic": 0.14285714285714285,
+      "mmlu_eval_accuracy_global_facts": 0.4,
+      "mmlu_eval_accuracy_high_school_biology": 0.34375,
+      "mmlu_eval_accuracy_high_school_chemistry": 0.3181818181818182,
+      "mmlu_eval_accuracy_high_school_computer_science": 0.5555555555555556,
+      "mmlu_eval_accuracy_high_school_european_history": 0.6111111111111112,
+      "mmlu_eval_accuracy_high_school_geography": 0.7727272727272727,
+      "mmlu_eval_accuracy_high_school_government_and_politics": 0.5714285714285714,
+      "mmlu_eval_accuracy_high_school_macroeconomics": 0.4186046511627907,
+      "mmlu_eval_accuracy_high_school_mathematics": 0.2413793103448276,
+      "mmlu_eval_accuracy_high_school_microeconomics": 0.38461538461538464,
+      "mmlu_eval_accuracy_high_school_physics": 0.11764705882352941,
+      "mmlu_eval_accuracy_high_school_psychology": 0.8666666666666667,
+      "mmlu_eval_accuracy_high_school_statistics": 0.21739130434782608,
+      "mmlu_eval_accuracy_high_school_us_history": 0.6363636363636364,
+      "mmlu_eval_accuracy_high_school_world_history": 0.7307692307692307,
+      "mmlu_eval_accuracy_human_aging": 0.6956521739130435,
+      "mmlu_eval_accuracy_human_sexuality": 0.3333333333333333,
+      "mmlu_eval_accuracy_international_law": 0.9230769230769231,
+      "mmlu_eval_accuracy_jurisprudence": 0.36363636363636365,
+      "mmlu_eval_accuracy_logical_fallacies": 0.6111111111111112,
+      "mmlu_eval_accuracy_machine_learning": 0.18181818181818182,
+      "mmlu_eval_accuracy_management": 0.6363636363636364,
+      "mmlu_eval_accuracy_marketing": 0.72,
+      "mmlu_eval_accuracy_medical_genetics": 0.9090909090909091,
+      "mmlu_eval_accuracy_miscellaneous": 0.627906976744186,
+      "mmlu_eval_accuracy_moral_disputes": 0.47368421052631576,
+      "mmlu_eval_accuracy_moral_scenarios": 0.24,
+      "mmlu_eval_accuracy_nutrition": 0.5757575757575758,
+      "mmlu_eval_accuracy_philosophy": 0.47058823529411764,
+      "mmlu_eval_accuracy_prehistory": 0.4857142857142857,
+      "mmlu_eval_accuracy_professional_accounting": 0.3870967741935484,
+      "mmlu_eval_accuracy_professional_law": 0.3,
+      "mmlu_eval_accuracy_professional_medicine": 0.6451612903225806,
+      "mmlu_eval_accuracy_professional_psychology": 0.5072463768115942,
+      "mmlu_eval_accuracy_public_relations": 0.6666666666666666,
+      "mmlu_eval_accuracy_security_studies": 0.48148148148148145,
+      "mmlu_eval_accuracy_sociology": 0.6363636363636364,
+      "mmlu_eval_accuracy_us_foreign_policy": 0.6363636363636364,
+      "mmlu_eval_accuracy_virology": 0.5,
+      "mmlu_eval_accuracy_world_religions": 0.631578947368421,
+      "mmlu_loss": 1.4106916200087525,
+      "step": 6000
+    },
+    {
+      "epoch": 1.9,
+      "learning_rate": 0.0002,
+      "loss": 0.7115,
+      "step": 6010
+    },
+    {
+      "epoch": 1.91,
+      "learning_rate": 0.0002,
+      "loss": 0.6862,
+      "step": 6020
+    },
+    {
+      "epoch": 1.91,
+      "learning_rate": 0.0002,
+      "loss": 0.6705,
+      "step": 6030
+    },
+    {
+      "epoch": 1.91,
+      "learning_rate": 0.0002,
+      "loss": 0.6848,
+      "step": 6040
+    },
+    {
+      "epoch": 1.92,
+      "learning_rate": 0.0002,
+      "loss": 0.7765,
+      "step": 6050
+    },
+    {
+      "epoch": 1.92,
+      "learning_rate": 0.0002,
+      "loss": 0.6801,
+      "step": 6060
+    },
+    {
+      "epoch": 1.92,
+      "learning_rate": 0.0002,
+      "loss": 0.6648,
+      "step": 6070
+    },
+    {
+      "epoch": 1.93,
+      "learning_rate": 0.0002,
+      "loss": 0.6847,
+      "step": 6080
+    },
+    {
+      "epoch": 1.93,
+      "learning_rate": 0.0002,
+      "loss": 0.665,
+      "step": 6090
+    },
+    {
+      "epoch": 1.93,
+      "learning_rate": 0.0002,
+      "loss": 0.7627,
+      "step": 6100
+    },
+    {
+      "epoch": 1.94,
+      "learning_rate": 0.0002,
+      "loss": 0.6874,
+      "step": 6110
+    },
+    {
+      "epoch": 1.94,
+      "learning_rate": 0.0002,
+      "loss": 0.6907,
+      "step": 6120
+    },
+    {
+      "epoch": 1.94,
+      "learning_rate": 0.0002,
+      "loss": 0.6369,
+      "step": 6130
+    },
+    {
+      "epoch": 1.95,
+      "learning_rate": 0.0002,
+      "loss": 0.7289,
+      "step": 6140
+    },
+    {
+      "epoch": 1.95,
+      "learning_rate": 0.0002,
+      "loss": 0.7233,
+      "step": 6150
+    },
+    {
+      "epoch": 1.95,
+      "learning_rate": 0.0002,
+      "loss": 0.68,
+      "step": 6160
+    },
+    {
+      "epoch": 1.96,
+      "learning_rate": 0.0002,
+      "loss": 0.6842,
+      "step": 6170
+    },
+    {
+      "epoch": 1.96,
+      "learning_rate": 0.0002,
+      "loss": 0.7125,
+      "step": 6180
+    },
+    {
+      "epoch": 1.96,
+      "learning_rate": 0.0002,
+      "loss": 0.683,
+      "step": 6190
+    },
+    {
+      "epoch": 1.96,
+      "learning_rate": 0.0002,
+      "loss": 0.7097,
+      "step": 6200
+    },
+    {
+      "epoch": 1.96,
+      "eval_loss": 0.7293602228164673,
+      "eval_runtime": 111.0579,
+      "eval_samples_per_second": 9.004,
+      "eval_steps_per_second": 4.502,
+      "step": 6200
+    },
+    {
+      "epoch": 1.96,
+      "mmlu_eval_accuracy": 0.4704848103487601,
+      "mmlu_eval_accuracy_abstract_algebra": 0.2727272727272727,
+      "mmlu_eval_accuracy_anatomy": 0.6428571428571429,
+      "mmlu_eval_accuracy_astronomy": 0.4375,
+      "mmlu_eval_accuracy_business_ethics": 0.5454545454545454,
+      "mmlu_eval_accuracy_clinical_knowledge": 0.4482758620689655,
+      "mmlu_eval_accuracy_college_biology": 0.375,
+      "mmlu_eval_accuracy_college_chemistry": 0.25,
+      "mmlu_eval_accuracy_college_computer_science": 0.2727272727272727,
+      "mmlu_eval_accuracy_college_mathematics": 0.2727272727272727,
+      "mmlu_eval_accuracy_college_medicine": 0.36363636363636365,
+      "mmlu_eval_accuracy_college_physics": 0.2727272727272727,
+      "mmlu_eval_accuracy_computer_security": 0.36363636363636365,
+      "mmlu_eval_accuracy_conceptual_physics": 0.38461538461538464,
+      "mmlu_eval_accuracy_econometrics": 0.25,
+      "mmlu_eval_accuracy_electrical_engineering": 0.25,
+      "mmlu_eval_accuracy_elementary_mathematics": 0.34146341463414637,
+      "mmlu_eval_accuracy_formal_logic": 0.14285714285714285,
+      "mmlu_eval_accuracy_global_facts": 0.3,
+      "mmlu_eval_accuracy_high_school_biology": 0.375,
+      "mmlu_eval_accuracy_high_school_chemistry": 0.22727272727272727,
+      "mmlu_eval_accuracy_high_school_computer_science": 0.5555555555555556,
+      "mmlu_eval_accuracy_high_school_european_history": 0.5555555555555556,
+      "mmlu_eval_accuracy_high_school_geography": 0.8181818181818182,
+      "mmlu_eval_accuracy_high_school_government_and_politics": 0.5714285714285714,
+      "mmlu_eval_accuracy_high_school_macroeconomics": 0.4883720930232558,
+      "mmlu_eval_accuracy_high_school_mathematics": 0.20689655172413793,
+      "mmlu_eval_accuracy_high_school_microeconomics": 0.4230769230769231,
+      "mmlu_eval_accuracy_high_school_physics": 0.11764705882352941,
+      "mmlu_eval_accuracy_high_school_psychology": 0.8666666666666667,
+      "mmlu_eval_accuracy_high_school_statistics": 0.30434782608695654,
+      "mmlu_eval_accuracy_high_school_us_history": 0.6363636363636364,
+      "mmlu_eval_accuracy_high_school_world_history": 0.6923076923076923,
+      "mmlu_eval_accuracy_human_aging": 0.782608695652174,
+      "mmlu_eval_accuracy_human_sexuality": 0.3333333333333333,
+      "mmlu_eval_accuracy_international_law": 0.9230769230769231,
+      "mmlu_eval_accuracy_jurisprudence": 0.36363636363636365,
+      "mmlu_eval_accuracy_logical_fallacies": 0.6111111111111112,
+      "mmlu_eval_accuracy_machine_learning": 0.18181818181818182,
+      "mmlu_eval_accuracy_management": 0.6363636363636364,
+      "mmlu_eval_accuracy_marketing": 0.68,
+      "mmlu_eval_accuracy_medical_genetics": 0.9090909090909091,
+      "mmlu_eval_accuracy_miscellaneous": 0.6511627906976745,
+      "mmlu_eval_accuracy_moral_disputes": 0.5526315789473685,
+      "mmlu_eval_accuracy_moral_scenarios": 0.28,
+      "mmlu_eval_accuracy_nutrition": 0.6060606060606061,
+      "mmlu_eval_accuracy_philosophy": 0.4411764705882353,
+      "mmlu_eval_accuracy_prehistory": 0.45714285714285713,
+      "mmlu_eval_accuracy_professional_accounting": 0.41935483870967744,
+      "mmlu_eval_accuracy_professional_law": 0.31176470588235294,
+      "mmlu_eval_accuracy_professional_medicine": 0.5806451612903226,
+      "mmlu_eval_accuracy_professional_psychology": 0.463768115942029,
+      "mmlu_eval_accuracy_public_relations": 0.6666666666666666,
+      "mmlu_eval_accuracy_security_studies": 0.48148148148148145,
+      "mmlu_eval_accuracy_sociology": 0.6363636363636364,
+      "mmlu_eval_accuracy_us_foreign_policy": 0.6363636363636364,
+      "mmlu_eval_accuracy_virology": 0.5555555555555556,
+      "mmlu_eval_accuracy_world_religions": 0.631578947368421,
+      "mmlu_loss": 1.374586288985011,
+      "step": 6200
+    },
+    {
+      "epoch": 1.97,
+      "learning_rate": 0.0002,
+      "loss": 0.7095,
+      "step": 6210
+    },
+    {
+      "epoch": 1.97,
+      "learning_rate": 0.0002,
+      "loss": 0.7681,
+      "step": 6220
+    },
+    {
+      "epoch": 1.97,
+      "learning_rate": 0.0002,
+      "loss": 0.7356,
+      "step": 6230
+    },
+    {
+      "epoch": 1.98,
+      "learning_rate": 0.0002,
+      "loss": 0.6956,
+      "step": 6240
+    },
+    {
+      "epoch": 1.98,
+      "learning_rate": 0.0002,
+      "loss": 0.7034,
+      "step": 6250
+    },
+    {
+      "epoch": 1.98,
+      "learning_rate": 0.0002,
+      "loss": 0.6532,
+      "step": 6260
+    },
+    {
+      "epoch": 1.99,
+      "learning_rate": 0.0002,
+      "loss": 0.6917,
+      "step": 6270
+    },
+    {
+      "epoch": 1.99,
+      "learning_rate": 0.0002,
+      "loss": 0.6392,
+      "step": 6280
+    },
+    {
+      "epoch": 1.99,
+      "learning_rate": 0.0002,
+      "loss": 0.6656,
+      "step": 6290
+    },
+    {
+      "epoch": 2.0,
+      "learning_rate": 0.0002,
+      "loss": 0.6829,
+      "step": 6300
+    },
+    {
+      "epoch": 2.0,
+      "learning_rate": 0.0002,
+      "loss": 0.675,
+      "step": 6310
+    },
+    {
+      "epoch": 2.0,
+      "learning_rate": 0.0002,
+      "loss": 0.6321,
+      "step": 6320
+    },
+    {
+      "epoch": 2.01,
+      "learning_rate": 0.0002,
+      "loss": 0.6109,
+      "step": 6330
+    },
+    {
+      "epoch": 2.01,
+      "learning_rate": 0.0002,
+      "loss": 0.6065,
+      "step": 6340
+    },
+    {
+      "epoch": 2.01,
+      "learning_rate": 0.0002,
+      "loss": 0.5912,
+      "step": 6350
+    },
+    {
+      "epoch": 2.02,
+      "learning_rate": 0.0002,
+      "loss": 0.613,
+      "step": 6360
+    },
+    {
+      "epoch": 2.02,
+      "learning_rate": 0.0002,
+      "loss": 0.586,
+      "step": 6370
+    },
+    {
+      "epoch": 2.02,
+      "learning_rate": 0.0002,
+      "loss": 0.6383,
+      "step": 6380
+    },
+    {
+      "epoch": 2.02,
+      "learning_rate": 0.0002,
+      "loss": 0.5629,
+      "step": 6390
+    },
+    {
+      "epoch": 2.03,
+      "learning_rate": 0.0002,
+      "loss": 0.6048,
+      "step": 6400
+    },
+    {
+      "epoch": 2.03,
+      "eval_loss": 0.7574472427368164,
+      "eval_runtime": 110.9511,
+      "eval_samples_per_second": 9.013,
+      "eval_steps_per_second": 4.506,
+      "step": 6400
+    },
+    {
+      "epoch": 2.03,
+      "mmlu_eval_accuracy": 0.470592564742188,
+      "mmlu_eval_accuracy_abstract_algebra": 0.2727272727272727,
+      "mmlu_eval_accuracy_anatomy": 0.6428571428571429,
+      "mmlu_eval_accuracy_astronomy": 0.375,
+      "mmlu_eval_accuracy_business_ethics": 0.45454545454545453,
+      "mmlu_eval_accuracy_clinical_knowledge": 0.5172413793103449,
+      "mmlu_eval_accuracy_college_biology": 0.375,
+      "mmlu_eval_accuracy_college_chemistry": 0.125,
+      "mmlu_eval_accuracy_college_computer_science": 0.2727272727272727,
+      "mmlu_eval_accuracy_college_mathematics": 0.2727272727272727,
+      "mmlu_eval_accuracy_college_medicine": 0.4090909090909091,
+      "mmlu_eval_accuracy_college_physics": 0.36363636363636365,
+      "mmlu_eval_accuracy_computer_security": 0.36363636363636365,
+      "mmlu_eval_accuracy_conceptual_physics": 0.4230769230769231,
+      "mmlu_eval_accuracy_econometrics": 0.25,
+      "mmlu_eval_accuracy_electrical_engineering": 0.25,
+      "mmlu_eval_accuracy_elementary_mathematics": 0.3170731707317073,
+      "mmlu_eval_accuracy_formal_logic": 0.14285714285714285,
+      "mmlu_eval_accuracy_global_facts": 0.3,
+      "mmlu_eval_accuracy_high_school_biology": 0.4375,
+      "mmlu_eval_accuracy_high_school_chemistry": 0.22727272727272727,
+      "mmlu_eval_accuracy_high_school_computer_science": 0.5555555555555556,
+      "mmlu_eval_accuracy_high_school_european_history": 0.6111111111111112,
+      "mmlu_eval_accuracy_high_school_geography": 0.8181818181818182,
+      "mmlu_eval_accuracy_high_school_government_and_politics": 0.6190476190476191,
+      "mmlu_eval_accuracy_high_school_macroeconomics": 0.5116279069767442,
+      "mmlu_eval_accuracy_high_school_mathematics": 0.20689655172413793,
+      "mmlu_eval_accuracy_high_school_microeconomics": 0.38461538461538464,
+      "mmlu_eval_accuracy_high_school_physics": 0.0,
+      "mmlu_eval_accuracy_high_school_psychology": 0.8333333333333334,
+      "mmlu_eval_accuracy_high_school_statistics": 0.34782608695652173,
+      "mmlu_eval_accuracy_high_school_us_history": 0.6818181818181818,
+      "mmlu_eval_accuracy_high_school_world_history": 0.7307692307692307,
+      "mmlu_eval_accuracy_human_aging": 0.7391304347826086,
+      "mmlu_eval_accuracy_human_sexuality": 0.3333333333333333,
+      "mmlu_eval_accuracy_international_law": 0.9230769230769231,
+      "mmlu_eval_accuracy_jurisprudence": 0.36363636363636365,
+      "mmlu_eval_accuracy_logical_fallacies": 0.6111111111111112,
+      "mmlu_eval_accuracy_machine_learning": 0.18181818181818182,
+      "mmlu_eval_accuracy_management": 0.6363636363636364,
+      "mmlu_eval_accuracy_marketing": 0.76,
+      "mmlu_eval_accuracy_medical_genetics": 0.9090909090909091,
+      "mmlu_eval_accuracy_miscellaneous": 0.6395348837209303,
+      "mmlu_eval_accuracy_moral_disputes": 0.5,
+      "mmlu_eval_accuracy_moral_scenarios": 0.26,
+      "mmlu_eval_accuracy_nutrition": 0.6060606060606061,
+      "mmlu_eval_accuracy_philosophy": 0.4411764705882353,
+      "mmlu_eval_accuracy_prehistory": 0.4857142857142857,
+      "mmlu_eval_accuracy_professional_accounting": 0.2903225806451613,
+      "mmlu_eval_accuracy_professional_law": 0.32941176470588235,
+      "mmlu_eval_accuracy_professional_medicine": 0.6129032258064516,
+      "mmlu_eval_accuracy_professional_psychology": 0.4927536231884058,
+      "mmlu_eval_accuracy_public_relations": 0.5833333333333334,
+      "mmlu_eval_accuracy_security_studies": 0.48148148148148145,
+      "mmlu_eval_accuracy_sociology": 0.6363636363636364,
+      "mmlu_eval_accuracy_us_foreign_policy": 0.7272727272727273,
+      "mmlu_eval_accuracy_virology": 0.5555555555555556,
+      "mmlu_eval_accuracy_world_religions": 0.631578947368421,
+      "mmlu_loss": 1.3004325469542422,
+      "step": 6400
+    },
+    {
+      "epoch": 2.03,
+      "learning_rate": 0.0002,
+      "loss": 0.5702,
+      "step": 6410
+    },
+    {
+      "epoch": 2.03,
+      "learning_rate": 0.0002,
+      "loss": 0.5957,
+      "step": 6420
+    },
+    {
+      "epoch": 2.04,
+      "learning_rate": 0.0002,
+      "loss": 0.5994,
+      "step": 6430
+    },
+    {
+      "epoch": 2.04,
+      "learning_rate": 0.0002,
+      "loss": 0.5922,
+      "step": 6440
+
},
|
| 6146 |
+
{
|
| 6147 |
+
"epoch": 2.04,
|
| 6148 |
+
"learning_rate": 0.0002,
|
| 6149 |
+
"loss": 0.5626,
|
| 6150 |
+
"step": 6450
|
| 6151 |
+
},
|
| 6152 |
+
{
|
| 6153 |
+
"epoch": 2.05,
|
| 6154 |
+
"learning_rate": 0.0002,
|
| 6155 |
+
"loss": 0.5912,
|
| 6156 |
+
"step": 6460
|
| 6157 |
+
},
|
| 6158 |
+
{
|
| 6159 |
+
"epoch": 2.05,
|
| 6160 |
+
"learning_rate": 0.0002,
|
| 6161 |
+
"loss": 0.5877,
|
| 6162 |
+
"step": 6470
|
| 6163 |
+
},
|
| 6164 |
+
{
|
| 6165 |
+
"epoch": 2.05,
|
| 6166 |
+
"learning_rate": 0.0002,
|
| 6167 |
+
"loss": 0.578,
|
| 6168 |
+
"step": 6480
|
| 6169 |
+
},
|
| 6170 |
+
{
|
| 6171 |
+
"epoch": 2.06,
|
| 6172 |
+
"learning_rate": 0.0002,
|
| 6173 |
+
"loss": 0.6207,
|
| 6174 |
+
"step": 6490
|
| 6175 |
+
},
|
| 6176 |
+
{
|
| 6177 |
+
"epoch": 2.06,
|
| 6178 |
+
"learning_rate": 0.0002,
|
| 6179 |
+
"loss": 0.5606,
|
| 6180 |
+
"step": 6500
|
| 6181 |
+
},
|
| 6182 |
+
{
|
| 6183 |
+
"epoch": 2.06,
|
| 6184 |
+
"learning_rate": 0.0002,
|
| 6185 |
+
"loss": 0.553,
|
| 6186 |
+
"step": 6510
|
| 6187 |
+
},
|
| 6188 |
+
{
|
| 6189 |
+
"epoch": 2.07,
|
| 6190 |
+
"learning_rate": 0.0002,
|
| 6191 |
+
"loss": 0.6092,
|
| 6192 |
+
"step": 6520
|
| 6193 |
+
},
|
| 6194 |
+
{
|
| 6195 |
+
"epoch": 2.07,
|
| 6196 |
+
"learning_rate": 0.0002,
|
| 6197 |
+
"loss": 0.6183,
|
| 6198 |
+
"step": 6530
|
| 6199 |
+
},
|
| 6200 |
+
{
|
| 6201 |
+
"epoch": 2.07,
|
| 6202 |
+
"learning_rate": 0.0002,
|
| 6203 |
+
"loss": 0.5825,
|
| 6204 |
+
"step": 6540
|
| 6205 |
+
},
|
| 6206 |
+
{
|
| 6207 |
+
"epoch": 2.08,
|
| 6208 |
+
"learning_rate": 0.0002,
|
| 6209 |
+
"loss": 0.5674,
|
| 6210 |
+
"step": 6550
|
| 6211 |
+
},
|
| 6212 |
+
{
|
| 6213 |
+
"epoch": 2.08,
|
| 6214 |
+
"learning_rate": 0.0002,
|
| 6215 |
+
"loss": 0.5587,
|
| 6216 |
+
"step": 6560
|
| 6217 |
+
},
|
| 6218 |
+
{
|
| 6219 |
+
"epoch": 2.08,
|
| 6220 |
+
"learning_rate": 0.0002,
|
| 6221 |
+
"loss": 0.5317,
|
| 6222 |
+
"step": 6570
|
| 6223 |
+
},
|
| 6224 |
+
{
|
| 6225 |
+
"epoch": 2.08,
|
| 6226 |
+
"learning_rate": 0.0002,
|
| 6227 |
+
"loss": 0.6731,
|
| 6228 |
+
"step": 6580
|
| 6229 |
+
},
|
| 6230 |
+
{
|
| 6231 |
+
"epoch": 2.09,
|
| 6232 |
+
"learning_rate": 0.0002,
|
| 6233 |
+
"loss": 0.6242,
|
| 6234 |
+
"step": 6590
|
| 6235 |
+
},
|
| 6236 |
+
{
|
| 6237 |
+
"epoch": 2.09,
|
| 6238 |
+
"learning_rate": 0.0002,
|
| 6239 |
+
"loss": 0.6332,
|
| 6240 |
+
"step": 6600
|
| 6241 |
+
},
|
| 6242 |
+
{
|
| 6243 |
+
"epoch": 2.09,
|
| 6244 |
+
"eval_loss": 0.7567528486251831,
|
| 6245 |
+
"eval_runtime": 111.0264,
|
| 6246 |
+
"eval_samples_per_second": 9.007,
|
| 6247 |
+
"eval_steps_per_second": 4.503,
|
| 6248 |
+
"step": 6600
|
| 6249 |
+
},
|
| 6250 |
+
{
|
| 6251 |
+
"epoch": 2.09,
|
| 6252 |
+
"mmlu_eval_accuracy": 0.47542707100737025,
|
| 6253 |
+
"mmlu_eval_accuracy_abstract_algebra": 0.2727272727272727,
|
| 6254 |
+
"mmlu_eval_accuracy_anatomy": 0.6428571428571429,
|
| 6255 |
+
"mmlu_eval_accuracy_astronomy": 0.4375,
|
| 6256 |
+
"mmlu_eval_accuracy_business_ethics": 0.45454545454545453,
|
| 6257 |
+
"mmlu_eval_accuracy_clinical_knowledge": 0.4827586206896552,
|
| 6258 |
+
"mmlu_eval_accuracy_college_biology": 0.375,
|
| 6259 |
+
"mmlu_eval_accuracy_college_chemistry": 0.25,
|
| 6260 |
+
"mmlu_eval_accuracy_college_computer_science": 0.2727272727272727,
|
| 6261 |
+
"mmlu_eval_accuracy_college_mathematics": 0.2727272727272727,
|
| 6262 |
+
"mmlu_eval_accuracy_college_medicine": 0.36363636363636365,
|
| 6263 |
+
"mmlu_eval_accuracy_college_physics": 0.36363636363636365,
|
| 6264 |
+
"mmlu_eval_accuracy_computer_security": 0.45454545454545453,
|
| 6265 |
+
"mmlu_eval_accuracy_conceptual_physics": 0.4230769230769231,
|
| 6266 |
+
"mmlu_eval_accuracy_econometrics": 0.16666666666666666,
|
| 6267 |
+
"mmlu_eval_accuracy_electrical_engineering": 0.25,
|
| 6268 |
+
"mmlu_eval_accuracy_elementary_mathematics": 0.34146341463414637,
|
| 6269 |
+
"mmlu_eval_accuracy_formal_logic": 0.2857142857142857,
|
| 6270 |
+
"mmlu_eval_accuracy_global_facts": 0.3,
|
| 6271 |
+
"mmlu_eval_accuracy_high_school_biology": 0.375,
|
| 6272 |
+
"mmlu_eval_accuracy_high_school_chemistry": 0.22727272727272727,
|
| 6273 |
+
"mmlu_eval_accuracy_high_school_computer_science": 0.5555555555555556,
|
| 6274 |
+
"mmlu_eval_accuracy_high_school_european_history": 0.6111111111111112,
|
| 6275 |
+
"mmlu_eval_accuracy_high_school_geography": 0.8636363636363636,
|
| 6276 |
+
"mmlu_eval_accuracy_high_school_government_and_politics": 0.6666666666666666,
|
| 6277 |
+
"mmlu_eval_accuracy_high_school_macroeconomics": 0.46511627906976744,
|
| 6278 |
+
"mmlu_eval_accuracy_high_school_mathematics": 0.20689655172413793,
|
| 6279 |
+
"mmlu_eval_accuracy_high_school_microeconomics": 0.46153846153846156,
|
| 6280 |
+
"mmlu_eval_accuracy_high_school_physics": 0.058823529411764705,
|
| 6281 |
+
"mmlu_eval_accuracy_high_school_psychology": 0.85,
|
| 6282 |
+
"mmlu_eval_accuracy_high_school_statistics": 0.2608695652173913,
|
| 6283 |
+
"mmlu_eval_accuracy_high_school_us_history": 0.6818181818181818,
|
| 6284 |
+
"mmlu_eval_accuracy_high_school_world_history": 0.7307692307692307,
|
| 6285 |
+
"mmlu_eval_accuracy_human_aging": 0.6956521739130435,
|
| 6286 |
+
"mmlu_eval_accuracy_human_sexuality": 0.3333333333333333,
|
| 6287 |
+
"mmlu_eval_accuracy_international_law": 0.8461538461538461,
|
| 6288 |
+
"mmlu_eval_accuracy_jurisprudence": 0.36363636363636365,
|
| 6289 |
+
"mmlu_eval_accuracy_logical_fallacies": 0.6111111111111112,
|
| 6290 |
+
"mmlu_eval_accuracy_machine_learning": 0.36363636363636365,
|
| 6291 |
+
"mmlu_eval_accuracy_management": 0.6363636363636364,
|
| 6292 |
+
"mmlu_eval_accuracy_marketing": 0.8,
|
| 6293 |
+
"mmlu_eval_accuracy_medical_genetics": 0.9090909090909091,
|
| 6294 |
+
"mmlu_eval_accuracy_miscellaneous": 0.6395348837209303,
|
| 6295 |
+
"mmlu_eval_accuracy_moral_disputes": 0.4473684210526316,
|
| 6296 |
+
"mmlu_eval_accuracy_moral_scenarios": 0.26,
|
| 6297 |
+
"mmlu_eval_accuracy_nutrition": 0.6060606060606061,
|
| 6298 |
+
"mmlu_eval_accuracy_philosophy": 0.47058823529411764,
|
| 6299 |
+
"mmlu_eval_accuracy_prehistory": 0.4857142857142857,
|
| 6300 |
+
"mmlu_eval_accuracy_professional_accounting": 0.25806451612903225,
|
| 6301 |
+
"mmlu_eval_accuracy_professional_law": 0.31176470588235294,
|
| 6302 |
+
"mmlu_eval_accuracy_professional_medicine": 0.5806451612903226,
|
| 6303 |
+
"mmlu_eval_accuracy_professional_psychology": 0.4782608695652174,
|
| 6304 |
+
"mmlu_eval_accuracy_public_relations": 0.5,
|
| 6305 |
+
"mmlu_eval_accuracy_security_studies": 0.48148148148148145,
|
| 6306 |
+
"mmlu_eval_accuracy_sociology": 0.6818181818181818,
|
| 6307 |
+
"mmlu_eval_accuracy_us_foreign_policy": 0.7272727272727273,
|
| 6308 |
+
"mmlu_eval_accuracy_virology": 0.5555555555555556,
|
| 6309 |
+
"mmlu_eval_accuracy_world_religions": 0.631578947368421,
|
| 6310 |
+
"mmlu_loss": 1.4275867019247448,
|
| 6311 |
+
"step": 6600
|
| 6312 |
+
},
|
| 6313 |
+
{
|
| 6314 |
+
"epoch": 2.09,
|
| 6315 |
+
"learning_rate": 0.0002,
|
| 6316 |
+
"loss": 0.5948,
|
| 6317 |
+
"step": 6610
|
| 6318 |
+
},
|
| 6319 |
+
{
|
| 6320 |
+
"epoch": 2.1,
|
| 6321 |
+
"learning_rate": 0.0002,
|
| 6322 |
+
"loss": 0.6068,
|
| 6323 |
+
"step": 6620
|
| 6324 |
+
},
|
| 6325 |
+
{
|
| 6326 |
+
"epoch": 2.1,
|
| 6327 |
+
"learning_rate": 0.0002,
|
| 6328 |
+
"loss": 0.5831,
|
| 6329 |
+
"step": 6630
|
| 6330 |
+
},
|
| 6331 |
+
{
|
| 6332 |
+
"epoch": 2.1,
|
| 6333 |
+
"learning_rate": 0.0002,
|
| 6334 |
+
"loss": 0.5664,
|
| 6335 |
+
"step": 6640
|
| 6336 |
+
},
|
| 6337 |
+
{
|
| 6338 |
+
"epoch": 2.11,
|
| 6339 |
+
"learning_rate": 0.0002,
|
| 6340 |
+
"loss": 0.622,
|
| 6341 |
+
"step": 6650
|
| 6342 |
+
},
|
| 6343 |
+
{
|
| 6344 |
+
"epoch": 2.11,
|
| 6345 |
+
"learning_rate": 0.0002,
|
| 6346 |
+
"loss": 0.5759,
|
| 6347 |
+
"step": 6660
|
| 6348 |
+
},
|
| 6349 |
+
{
|
| 6350 |
+
"epoch": 2.11,
|
| 6351 |
+
"learning_rate": 0.0002,
|
| 6352 |
+
"loss": 0.5841,
|
| 6353 |
+
"step": 6670
|
| 6354 |
+
},
|
| 6355 |
+
{
|
| 6356 |
+
"epoch": 2.12,
|
| 6357 |
+
"learning_rate": 0.0002,
|
| 6358 |
+
"loss": 0.6221,
|
| 6359 |
+
"step": 6680
|
| 6360 |
+
},
|
| 6361 |
+
{
|
| 6362 |
+
"epoch": 2.12,
|
| 6363 |
+
"learning_rate": 0.0002,
|
| 6364 |
+
"loss": 0.5904,
|
| 6365 |
+
"step": 6690
|
| 6366 |
+
},
|
| 6367 |
+
{
|
| 6368 |
+
"epoch": 2.12,
|
| 6369 |
+
"learning_rate": 0.0002,
|
| 6370 |
+
"loss": 0.6121,
|
| 6371 |
+
"step": 6700
|
| 6372 |
+
},
|
| 6373 |
+
{
|
| 6374 |
+
"epoch": 2.13,
|
| 6375 |
+
"learning_rate": 0.0002,
|
| 6376 |
+
"loss": 0.5526,
|
| 6377 |
+
"step": 6710
|
| 6378 |
+
},
|
| 6379 |
+
{
|
| 6380 |
+
"epoch": 2.13,
|
| 6381 |
+
"learning_rate": 0.0002,
|
| 6382 |
+
"loss": 0.6742,
|
| 6383 |
+
"step": 6720
|
| 6384 |
+
},
|
| 6385 |
+
{
|
| 6386 |
+
"epoch": 2.13,
|
| 6387 |
+
"learning_rate": 0.0002,
|
| 6388 |
+
"loss": 0.5705,
|
| 6389 |
+
"step": 6730
|
| 6390 |
+
},
|
| 6391 |
+
{
|
| 6392 |
+
"epoch": 2.14,
|
| 6393 |
+
"learning_rate": 0.0002,
|
| 6394 |
+
"loss": 0.6151,
|
| 6395 |
+
"step": 6740
|
| 6396 |
+
},
|
| 6397 |
+
{
|
| 6398 |
+
"epoch": 2.14,
|
| 6399 |
+
"learning_rate": 0.0002,
|
| 6400 |
+
"loss": 0.5902,
|
| 6401 |
+
"step": 6750
|
| 6402 |
+
},
|
| 6403 |
+
{
|
| 6404 |
+
"epoch": 2.14,
|
| 6405 |
+
"learning_rate": 0.0002,
|
| 6406 |
+
"loss": 0.6448,
|
| 6407 |
+
"step": 6760
|
| 6408 |
+
},
|
| 6409 |
+
{
|
| 6410 |
+
"epoch": 2.15,
|
| 6411 |
+
"learning_rate": 0.0002,
|
| 6412 |
+
"loss": 0.5395,
|
| 6413 |
+
"step": 6770
|
| 6414 |
+
},
|
| 6415 |
+
{
|
| 6416 |
+
"epoch": 2.15,
|
| 6417 |
+
"learning_rate": 0.0002,
|
| 6418 |
+
"loss": 0.5613,
|
| 6419 |
+
"step": 6780
|
| 6420 |
+
},
|
| 6421 |
+
{
|
| 6422 |
+
"epoch": 2.15,
|
| 6423 |
+
"learning_rate": 0.0002,
|
| 6424 |
+
"loss": 0.5802,
|
| 6425 |
+
"step": 6790
|
| 6426 |
+
},
|
| 6427 |
+
{
|
| 6428 |
+
"epoch": 2.15,
|
| 6429 |
+
"learning_rate": 0.0002,
|
| 6430 |
+
"loss": 0.6026,
|
| 6431 |
+
"step": 6800
|
| 6432 |
+
},
|
| 6433 |
+
{
|
| 6434 |
+
"epoch": 2.15,
|
| 6435 |
+
"eval_loss": 0.7631368637084961,
|
| 6436 |
+
"eval_runtime": 111.0583,
|
| 6437 |
+
"eval_samples_per_second": 9.004,
|
| 6438 |
+
"eval_steps_per_second": 4.502,
|
| 6439 |
+
"step": 6800
|
| 6440 |
+
},
|
| 6441 |
+
{
|
| 6442 |
+
"epoch": 2.15,
|
| 6443 |
+
"mmlu_eval_accuracy": 0.47370240345715936,
|
| 6444 |
+
"mmlu_eval_accuracy_abstract_algebra": 0.36363636363636365,
|
| 6445 |
+
"mmlu_eval_accuracy_anatomy": 0.5714285714285714,
|
| 6446 |
+
"mmlu_eval_accuracy_astronomy": 0.4375,
|
| 6447 |
+
"mmlu_eval_accuracy_business_ethics": 0.45454545454545453,
|
| 6448 |
+
"mmlu_eval_accuracy_clinical_knowledge": 0.4827586206896552,
|
| 6449 |
+
"mmlu_eval_accuracy_college_biology": 0.375,
|
| 6450 |
+
"mmlu_eval_accuracy_college_chemistry": 0.375,
|
| 6451 |
+
"mmlu_eval_accuracy_college_computer_science": 0.2727272727272727,
|
| 6452 |
+
"mmlu_eval_accuracy_college_mathematics": 0.2727272727272727,
|
| 6453 |
+
"mmlu_eval_accuracy_college_medicine": 0.4090909090909091,
|
| 6454 |
+
"mmlu_eval_accuracy_college_physics": 0.2727272727272727,
|
| 6455 |
+
"mmlu_eval_accuracy_computer_security": 0.45454545454545453,
|
| 6456 |
+
"mmlu_eval_accuracy_conceptual_physics": 0.4230769230769231,
|
| 6457 |
+
"mmlu_eval_accuracy_econometrics": 0.16666666666666666,
|
| 6458 |
+
"mmlu_eval_accuracy_electrical_engineering": 0.25,
|
| 6459 |
+
"mmlu_eval_accuracy_elementary_mathematics": 0.34146341463414637,
|
| 6460 |
+
"mmlu_eval_accuracy_formal_logic": 0.2857142857142857,
|
| 6461 |
+
"mmlu_eval_accuracy_global_facts": 0.3,
|
| 6462 |
+
"mmlu_eval_accuracy_high_school_biology": 0.375,
|
| 6463 |
+
"mmlu_eval_accuracy_high_school_chemistry": 0.2727272727272727,
|
| 6464 |
+
"mmlu_eval_accuracy_high_school_computer_science": 0.5555555555555556,
|
| 6465 |
+
"mmlu_eval_accuracy_high_school_european_history": 0.6111111111111112,
|
| 6466 |
+
"mmlu_eval_accuracy_high_school_geography": 0.8181818181818182,
|
| 6467 |
+
"mmlu_eval_accuracy_high_school_government_and_politics": 0.6190476190476191,
|
| 6468 |
+
"mmlu_eval_accuracy_high_school_macroeconomics": 0.5116279069767442,
|
| 6469 |
+
"mmlu_eval_accuracy_high_school_mathematics": 0.13793103448275862,
|
| 6470 |
+
"mmlu_eval_accuracy_high_school_microeconomics": 0.46153846153846156,
|
| 6471 |
+
"mmlu_eval_accuracy_high_school_physics": 0.0,
|
| 6472 |
+
"mmlu_eval_accuracy_high_school_psychology": 0.8666666666666667,
|
| 6473 |
+
"mmlu_eval_accuracy_high_school_statistics": 0.30434782608695654,
|
| 6474 |
+
"mmlu_eval_accuracy_high_school_us_history": 0.6363636363636364,
|
| 6475 |
+
"mmlu_eval_accuracy_high_school_world_history": 0.6538461538461539,
|
| 6476 |
+
"mmlu_eval_accuracy_human_aging": 0.6956521739130435,
|
| 6477 |
+
"mmlu_eval_accuracy_human_sexuality": 0.4166666666666667,
|
| 6478 |
+
"mmlu_eval_accuracy_international_law": 0.9230769230769231,
|
| 6479 |
+
"mmlu_eval_accuracy_jurisprudence": 0.36363636363636365,
|
| 6480 |
+
"mmlu_eval_accuracy_logical_fallacies": 0.6111111111111112,
|
| 6481 |
+
"mmlu_eval_accuracy_machine_learning": 0.18181818181818182,
|
| 6482 |
+
"mmlu_eval_accuracy_management": 0.6363636363636364,
|
| 6483 |
+
"mmlu_eval_accuracy_marketing": 0.72,
|
| 6484 |
+
"mmlu_eval_accuracy_medical_genetics": 0.9090909090909091,
|
| 6485 |
+
"mmlu_eval_accuracy_miscellaneous": 0.6511627906976745,
|
| 6486 |
+
"mmlu_eval_accuracy_moral_disputes": 0.4473684210526316,
|
| 6487 |
+
"mmlu_eval_accuracy_moral_scenarios": 0.25,
|
| 6488 |
+
"mmlu_eval_accuracy_nutrition": 0.6666666666666666,
|
| 6489 |
+
"mmlu_eval_accuracy_philosophy": 0.5,
|
| 6490 |
+
"mmlu_eval_accuracy_prehistory": 0.4857142857142857,
|
| 6491 |
+
"mmlu_eval_accuracy_professional_accounting": 0.2903225806451613,
|
| 6492 |
+
"mmlu_eval_accuracy_professional_law": 0.3235294117647059,
|
| 6493 |
+
"mmlu_eval_accuracy_professional_medicine": 0.5483870967741935,
|
| 6494 |
+
"mmlu_eval_accuracy_professional_psychology": 0.5072463768115942,
|
| 6495 |
+
"mmlu_eval_accuracy_public_relations": 0.5,
|
| 6496 |
+
"mmlu_eval_accuracy_security_studies": 0.4444444444444444,
|
| 6497 |
+
"mmlu_eval_accuracy_sociology": 0.6818181818181818,
|
| 6498 |
+
"mmlu_eval_accuracy_us_foreign_policy": 0.7272727272727273,
|
| 6499 |
+
"mmlu_eval_accuracy_virology": 0.5555555555555556,
|
| 6500 |
+
"mmlu_eval_accuracy_world_religions": 0.631578947368421,
|
| 6501 |
+
"mmlu_loss": 1.295992794337223,
|
| 6502 |
+
"step": 6800
|
| 6503 |
+
},
|
| 6504 |
+
{
|
| 6505 |
+
"epoch": 2.16,
|
| 6506 |
+
"learning_rate": 0.0002,
|
| 6507 |
+
"loss": 0.5435,
|
| 6508 |
+
"step": 6810
|
| 6509 |
+
},
|
| 6510 |
+
{
|
| 6511 |
+
"epoch": 2.16,
|
| 6512 |
+
"learning_rate": 0.0002,
|
| 6513 |
+
"loss": 0.593,
|
| 6514 |
+
"step": 6820
|
| 6515 |
+
},
|
| 6516 |
+
{
|
| 6517 |
+
"epoch": 2.16,
|
| 6518 |
+
"learning_rate": 0.0002,
|
| 6519 |
+
"loss": 0.5898,
|
| 6520 |
+
"step": 6830
|
| 6521 |
+
},
|
| 6522 |
+
{
|
| 6523 |
+
"epoch": 2.17,
|
| 6524 |
+
"learning_rate": 0.0002,
|
| 6525 |
+
"loss": 0.5404,
|
| 6526 |
+
"step": 6840
|
| 6527 |
+
},
|
| 6528 |
+
{
|
| 6529 |
+
"epoch": 2.17,
|
| 6530 |
+
"learning_rate": 0.0002,
|
| 6531 |
+
"loss": 0.593,
|
| 6532 |
+
"step": 6850
|
| 6533 |
+
},
|
| 6534 |
+
{
|
| 6535 |
+
"epoch": 2.17,
|
| 6536 |
+
"learning_rate": 0.0002,
|
| 6537 |
+
"loss": 0.5832,
|
| 6538 |
+
"step": 6860
|
| 6539 |
+
},
|
| 6540 |
+
{
|
| 6541 |
+
"epoch": 2.18,
|
| 6542 |
+
"learning_rate": 0.0002,
|
| 6543 |
+
"loss": 0.6201,
|
| 6544 |
+
"step": 6870
|
| 6545 |
+
},
|
| 6546 |
+
{
|
| 6547 |
+
"epoch": 2.18,
|
| 6548 |
+
"learning_rate": 0.0002,
|
| 6549 |
+
"loss": 0.6147,
|
| 6550 |
+
"step": 6880
|
| 6551 |
+
},
|
| 6552 |
+
{
|
| 6553 |
+
"epoch": 2.18,
|
| 6554 |
+
"learning_rate": 0.0002,
|
| 6555 |
+
"loss": 0.6102,
|
| 6556 |
+
"step": 6890
|
| 6557 |
+
},
|
| 6558 |
+
{
|
| 6559 |
+
"epoch": 2.19,
|
| 6560 |
+
"learning_rate": 0.0002,
|
| 6561 |
+
"loss": 0.5885,
|
| 6562 |
+
"step": 6900
|
| 6563 |
+
},
|
| 6564 |
+
{
|
| 6565 |
+
"epoch": 2.19,
|
| 6566 |
+
"learning_rate": 0.0002,
|
| 6567 |
+
"loss": 0.5549,
|
| 6568 |
+
"step": 6910
|
| 6569 |
+
},
|
| 6570 |
+
{
|
| 6571 |
+
"epoch": 2.19,
|
| 6572 |
+
"learning_rate": 0.0002,
|
| 6573 |
+
"loss": 0.5973,
|
| 6574 |
+
"step": 6920
|
| 6575 |
+
},
|
| 6576 |
+
{
|
| 6577 |
+
"epoch": 2.2,
|
| 6578 |
+
"learning_rate": 0.0002,
|
| 6579 |
+
"loss": 0.589,
|
| 6580 |
+
"step": 6930
|
| 6581 |
+
},
|
| 6582 |
+
{
|
| 6583 |
+
"epoch": 2.2,
|
| 6584 |
+
"learning_rate": 0.0002,
|
| 6585 |
+
"loss": 0.6258,
|
| 6586 |
+
"step": 6940
|
| 6587 |
+
},
|
| 6588 |
+
{
|
| 6589 |
+
"epoch": 2.2,
|
| 6590 |
+
"learning_rate": 0.0002,
|
| 6591 |
+
"loss": 0.6038,
|
| 6592 |
+
"step": 6950
|
| 6593 |
+
},
|
| 6594 |
+
{
|
| 6595 |
+
"epoch": 2.21,
|
| 6596 |
+
"learning_rate": 0.0002,
|
| 6597 |
+
"loss": 0.5865,
|
| 6598 |
+
"step": 6960
|
| 6599 |
+
},
|
| 6600 |
+
{
|
| 6601 |
+
"epoch": 2.21,
|
| 6602 |
+
"learning_rate": 0.0002,
|
| 6603 |
+
"loss": 0.6355,
|
| 6604 |
+
"step": 6970
|
| 6605 |
+
},
|
| 6606 |
+
{
|
| 6607 |
+
"epoch": 2.21,
|
| 6608 |
+
"learning_rate": 0.0002,
|
| 6609 |
+
"loss": 0.6572,
|
| 6610 |
+
"step": 6980
|
| 6611 |
+
},
|
| 6612 |
+
{
|
| 6613 |
+
"epoch": 2.21,
|
| 6614 |
+
"learning_rate": 0.0002,
|
| 6615 |
+
"loss": 0.5367,
|
| 6616 |
+
"step": 6990
|
| 6617 |
+
},
|
| 6618 |
+
{
|
| 6619 |
+
"epoch": 2.22,
|
| 6620 |
+
"learning_rate": 0.0002,
|
| 6621 |
+
"loss": 0.5959,
|
| 6622 |
+
"step": 7000
|
| 6623 |
+
},
|
| 6624 |
+
{
|
| 6625 |
+
"epoch": 2.22,
|
| 6626 |
+
"eval_loss": 0.7645158767700195,
|
| 6627 |
+
"eval_runtime": 111.037,
|
| 6628 |
+
"eval_samples_per_second": 9.006,
|
| 6629 |
+
"eval_steps_per_second": 4.503,
|
| 6630 |
+
"step": 7000
|
| 6631 |
+
},
|
| 6632 |
+
{
|
| 6633 |
+
"epoch": 2.22,
|
| 6634 |
+
"mmlu_eval_accuracy": 0.478166482161635,
|
| 6635 |
+
"mmlu_eval_accuracy_abstract_algebra": 0.36363636363636365,
|
| 6636 |
+
"mmlu_eval_accuracy_anatomy": 0.5,
|
| 6637 |
+
"mmlu_eval_accuracy_astronomy": 0.4375,
|
| 6638 |
+
"mmlu_eval_accuracy_business_ethics": 0.45454545454545453,
|
| 6639 |
+
"mmlu_eval_accuracy_clinical_knowledge": 0.4827586206896552,
|
| 6640 |
+
"mmlu_eval_accuracy_college_biology": 0.375,
|
| 6641 |
+
"mmlu_eval_accuracy_college_chemistry": 0.375,
|
| 6642 |
+
"mmlu_eval_accuracy_college_computer_science": 0.2727272727272727,
|
| 6643 |
+
"mmlu_eval_accuracy_college_mathematics": 0.09090909090909091,
|
| 6644 |
+
"mmlu_eval_accuracy_college_medicine": 0.36363636363636365,
|
| 6645 |
+
"mmlu_eval_accuracy_college_physics": 0.36363636363636365,
|
| 6646 |
+
"mmlu_eval_accuracy_computer_security": 0.5454545454545454,
|
| 6647 |
+
"mmlu_eval_accuracy_conceptual_physics": 0.46153846153846156,
|
| 6648 |
+
"mmlu_eval_accuracy_econometrics": 0.16666666666666666,
|
| 6649 |
+
"mmlu_eval_accuracy_electrical_engineering": 0.25,
|
| 6650 |
+
"mmlu_eval_accuracy_elementary_mathematics": 0.34146341463414637,
|
| 6651 |
+
"mmlu_eval_accuracy_formal_logic": 0.2857142857142857,
|
| 6652 |
+
"mmlu_eval_accuracy_global_facts": 0.5,
|
| 6653 |
+
"mmlu_eval_accuracy_high_school_biology": 0.375,
|
| 6654 |
+
"mmlu_eval_accuracy_high_school_chemistry": 0.22727272727272727,
|
| 6655 |
+
"mmlu_eval_accuracy_high_school_computer_science": 0.5555555555555556,
|
| 6656 |
+
"mmlu_eval_accuracy_high_school_european_history": 0.6111111111111112,
|
| 6657 |
+
"mmlu_eval_accuracy_high_school_geography": 0.8181818181818182,
|
| 6658 |
+
"mmlu_eval_accuracy_high_school_government_and_politics": 0.6190476190476191,
|
| 6659 |
+
"mmlu_eval_accuracy_high_school_macroeconomics": 0.46511627906976744,
|
| 6660 |
+
"mmlu_eval_accuracy_high_school_mathematics": 0.13793103448275862,
|
| 6661 |
+
"mmlu_eval_accuracy_high_school_microeconomics": 0.5,
|
| 6662 |
+
"mmlu_eval_accuracy_high_school_physics": 0.058823529411764705,
|
| 6663 |
+
"mmlu_eval_accuracy_high_school_psychology": 0.8666666666666667,
|
| 6664 |
+
"mmlu_eval_accuracy_high_school_statistics": 0.2608695652173913,
|
| 6665 |
+
"mmlu_eval_accuracy_high_school_us_history": 0.6818181818181818,
|
| 6666 |
+
"mmlu_eval_accuracy_high_school_world_history": 0.7307692307692307,
|
| 6667 |
+
"mmlu_eval_accuracy_human_aging": 0.782608695652174,
|
| 6668 |
+
"mmlu_eval_accuracy_human_sexuality": 0.3333333333333333,
|
| 6669 |
+
"mmlu_eval_accuracy_international_law": 0.9230769230769231,
|
| 6670 |
+
"mmlu_eval_accuracy_jurisprudence": 0.36363636363636365,
|
| 6671 |
+
"mmlu_eval_accuracy_logical_fallacies": 0.6111111111111112,
|
| 6672 |
+
"mmlu_eval_accuracy_machine_learning": 0.09090909090909091,
|
| 6673 |
+
"mmlu_eval_accuracy_management": 0.7272727272727273,
|
| 6674 |
+
"mmlu_eval_accuracy_marketing": 0.84,
|
| 6675 |
+
"mmlu_eval_accuracy_medical_genetics": 0.9090909090909091,
|
| 6676 |
+
"mmlu_eval_accuracy_miscellaneous": 0.6511627906976745,
|
| 6677 |
+
"mmlu_eval_accuracy_moral_disputes": 0.47368421052631576,
|
| 6678 |
+
"mmlu_eval_accuracy_moral_scenarios": 0.23,
|
| 6679 |
+
"mmlu_eval_accuracy_nutrition": 0.6060606060606061,
|
| 6680 |
+
"mmlu_eval_accuracy_philosophy": 0.5,
|
| 6681 |
+
"mmlu_eval_accuracy_prehistory": 0.5142857142857142,
|
| 6682 |
+
"mmlu_eval_accuracy_professional_accounting": 0.2903225806451613,
|
| 6683 |
+
"mmlu_eval_accuracy_professional_law": 0.35294117647058826,
|
| 6684 |
+
"mmlu_eval_accuracy_professional_medicine": 0.5161290322580645,
|
| 6685 |
+
"mmlu_eval_accuracy_professional_psychology": 0.463768115942029,
|
| 6686 |
+
"mmlu_eval_accuracy_public_relations": 0.5,
|
| 6687 |
+
"mmlu_eval_accuracy_security_studies": 0.4444444444444444,
|
| 6688 |
+
"mmlu_eval_accuracy_sociology": 0.6818181818181818,
|
| 6689 |
+
"mmlu_eval_accuracy_us_foreign_policy": 0.7272727272727273,
|
| 6690 |
+
"mmlu_eval_accuracy_virology": 0.5,
|
| 6691 |
+
"mmlu_eval_accuracy_world_religions": 0.6842105263157895,
|
| 6692 |
+
"mmlu_loss": 1.506881151358079,
|
| 6693 |
+
"step": 7000
|
| 6694 |
+
},
|
| 6695 |
+
{
|
| 6696 |
+
"epoch": 2.22,
|
| 6697 |
+
"learning_rate": 0.0002,
|
| 6698 |
+
"loss": 0.6429,
|
| 6699 |
+
"step": 7010
|
| 6700 |
+
},
|
| 6701 |
+
{
|
| 6702 |
+
"epoch": 2.22,
|
| 6703 |
+
"learning_rate": 0.0002,
|
| 6704 |
+
"loss": 0.5899,
|
| 6705 |
+
"step": 7020
|
| 6706 |
+
},
|
| 6707 |
+
{
|
| 6708 |
+
"epoch": 2.23,
|
| 6709 |
+
"learning_rate": 0.0002,
|
| 6710 |
+
"loss": 0.5661,
|
| 6711 |
+
"step": 7030
|
| 6712 |
+
},
|
| 6713 |
+
{
|
| 6714 |
+
"epoch": 2.23,
|
| 6715 |
+
"learning_rate": 0.0002,
|
| 6716 |
+
"loss": 0.5747,
|
| 6717 |
+
"step": 7040
|
| 6718 |
+
},
|
| 6719 |
+
{
|
| 6720 |
+
"epoch": 2.23,
|
| 6721 |
+
"learning_rate": 0.0002,
|
| 6722 |
+
"loss": 0.603,
|
| 6723 |
+
"step": 7050
|
| 6724 |
+
},
|
| 6725 |
+
{
|
| 6726 |
+
"epoch": 2.24,
|
| 6727 |
+
"learning_rate": 0.0002,
|
| 6728 |
+
"loss": 0.5864,
|
| 6729 |
+
"step": 7060
|
| 6730 |
+
},
|
| 6731 |
+
{
|
| 6732 |
+
"epoch": 2.24,
|
| 6733 |
+
"learning_rate": 0.0002,
|
| 6734 |
+
"loss": 0.588,
|
| 6735 |
+
"step": 7070
|
| 6736 |
+
},
|
| 6737 |
+
{
|
| 6738 |
+
"epoch": 2.24,
|
| 6739 |
+
"learning_rate": 0.0002,
|
| 6740 |
+
"loss": 0.6275,
|
| 6741 |
+
"step": 7080
|
| 6742 |
+
},
|
| 6743 |
+
{
|
| 6744 |
+
"epoch": 2.25,
|
| 6745 |
+
"learning_rate": 0.0002,
|
| 6746 |
+
"loss": 0.6118,
|
| 6747 |
+
"step": 7090
|
| 6748 |
+
},
|
| 6749 |
+
{
|
| 6750 |
+
"epoch": 2.25,
|
| 6751 |
+
"learning_rate": 0.0002,
|
| 6752 |
+
"loss": 0.6475,
|
| 6753 |
+
"step": 7100
|
| 6754 |
+
},
|
| 6755 |
+
{
|
| 6756 |
+
"epoch": 2.25,
|
| 6757 |
+
"learning_rate": 0.0002,
|
| 6758 |
+
"loss": 0.6191,
|
| 6759 |
+
"step": 7110
|
| 6760 |
+
},
|
| 6761 |
+
{
|
| 6762 |
+
"epoch": 2.26,
|
| 6763 |
+
"learning_rate": 0.0002,
|
| 6764 |
+
"loss": 0.5623,
|
| 6765 |
+
"step": 7120
|
| 6766 |
+
},
|
| 6767 |
+
{
|
| 6768 |
+
"epoch": 2.26,
|
| 6769 |
+
"learning_rate": 0.0002,
|
| 6770 |
+
"loss": 0.6052,
|
| 6771 |
+
"step": 7130
|
| 6772 |
+
},
|
| 6773 |
+
{
|
| 6774 |
+
"epoch": 2.26,
|
| 6775 |
+
"learning_rate": 0.0002,
|
| 6776 |
+
"loss": 0.545,
|
| 6777 |
+
"step": 7140
|
| 6778 |
+
},
|
| 6779 |
+
{
|
| 6780 |
+
"epoch": 2.27,
|
| 6781 |
+
"learning_rate": 0.0002,
|
| 6782 |
+
"loss": 0.5975,
|
| 6783 |
+
"step": 7150
|
| 6784 |
+
},
|
| 6785 |
+
{
|
| 6786 |
+
"epoch": 2.27,
|
| 6787 |
+
"learning_rate": 0.0002,
|
| 6788 |
+
"loss": 0.6022,
|
| 6789 |
+
"step": 7160
|
| 6790 |
+
},
|
| 6791 |
+
{
|
| 6792 |
+
"epoch": 2.27,
|
| 6793 |
+
"learning_rate": 0.0002,
|
| 6794 |
+
"loss": 0.608,
|
| 6795 |
+
"step": 7170
|
| 6796 |
+
},
|
| 6797 |
+
{
|
| 6798 |
+
"epoch": 2.28,
|
| 6799 |
+
"learning_rate": 0.0002,
|
| 6800 |
+
"loss": 0.6401,
|
| 6801 |
+
"step": 7180
|
| 6802 |
+
},
|
| 6803 |
+
{
|
| 6804 |
+
"epoch": 2.28,
|
| 6805 |
+
"learning_rate": 0.0002,
|
| 6806 |
+
"loss": 0.6429,
|
| 6807 |
+
"step": 7190
|
| 6808 |
+
},
|
| 6809 |
+
{
|
| 6810 |
+
"epoch": 2.28,
|
| 6811 |
+
"learning_rate": 0.0002,
|
| 6812 |
+
"loss": 0.5495,
|
| 6813 |
+
"step": 7200
|
| 6814 |
+
},
|
| 6815 |
+
{
|
| 6816 |
+
"epoch": 2.28,
|
| 6817 |
+
"eval_loss": 0.7578040361404419,
|
| 6818 |
+
"eval_runtime": 111.0662,
|
| 6819 |
+
"eval_samples_per_second": 9.004,
|
| 6820 |
+
"eval_steps_per_second": 4.502,
|
| 6821 |
+
"step": 7200
|
| 6822 |
+
},
|
| 6823 |
+
{
|
| 6824 |
+
"epoch": 2.28,
|
| 6825 |
+
"mmlu_eval_accuracy": 0.47051789661643223,
|
| 6826 |
+
"mmlu_eval_accuracy_abstract_algebra": 0.36363636363636365,
|
| 6827 |
+
"mmlu_eval_accuracy_anatomy": 0.6428571428571429,
|
| 6828 |
+
"mmlu_eval_accuracy_astronomy": 0.4375,
|
| 6829 |
+
"mmlu_eval_accuracy_business_ethics": 0.45454545454545453,
|
| 6830 |
+
"mmlu_eval_accuracy_clinical_knowledge": 0.5172413793103449,
|
| 6831 |
+
"mmlu_eval_accuracy_college_biology": 0.375,
|
| 6832 |
+
"mmlu_eval_accuracy_college_chemistry": 0.125,
|
| 6833 |
+
"mmlu_eval_accuracy_college_computer_science": 0.2727272727272727,
|
| 6834 |
+
"mmlu_eval_accuracy_college_mathematics": 0.09090909090909091,
|
| 6835 |
+
"mmlu_eval_accuracy_college_medicine": 0.36363636363636365,
|
| 6836 |
+
"mmlu_eval_accuracy_college_physics": 0.36363636363636365,
|
| 6837 |
+
"mmlu_eval_accuracy_computer_security": 0.45454545454545453,
|
| 6838 |
+
"mmlu_eval_accuracy_conceptual_physics": 0.4230769230769231,
|
| 6839 |
+
"mmlu_eval_accuracy_econometrics": 0.25,
|
| 6840 |
+
"mmlu_eval_accuracy_electrical_engineering": 0.25,
|
| 6841 |
+
"mmlu_eval_accuracy_elementary_mathematics": 0.2682926829268293,
|
| 6842 |
+
"mmlu_eval_accuracy_formal_logic": 0.2857142857142857,
|
| 6843 |
+
"mmlu_eval_accuracy_global_facts": 0.3,
|
| 6844 |
+
"mmlu_eval_accuracy_high_school_biology": 0.34375,
|
| 6845 |
+
"mmlu_eval_accuracy_high_school_chemistry": 0.22727272727272727,
|
| 6846 |
+
"mmlu_eval_accuracy_high_school_computer_science": 0.5555555555555556,
|
| 6847 |
+
"mmlu_eval_accuracy_high_school_european_history": 0.5555555555555556,
|
| 6848 |
+
"mmlu_eval_accuracy_high_school_geography": 0.8636363636363636,
|
| 6849 |
+
"mmlu_eval_accuracy_high_school_government_and_politics": 0.6190476190476191,
|
| 6850 |
+
"mmlu_eval_accuracy_high_school_macroeconomics": 0.46511627906976744,
|
| 6851 |
+
"mmlu_eval_accuracy_high_school_mathematics": 0.1724137931034483,
|
| 6852 |
+
"mmlu_eval_accuracy_high_school_microeconomics": 0.46153846153846156,
|
| 6853 |
+
"mmlu_eval_accuracy_high_school_physics": 0.0,
|
| 6854 |
+
"mmlu_eval_accuracy_high_school_psychology": 0.8666666666666667,
|
| 6855 |
+
"mmlu_eval_accuracy_high_school_statistics": 0.30434782608695654,
|
| 6856 |
+
"mmlu_eval_accuracy_high_school_us_history": 0.6363636363636364,
|
| 6857 |
+
"mmlu_eval_accuracy_high_school_world_history": 0.6923076923076923,
|
| 6858 |
+
"mmlu_eval_accuracy_human_aging": 0.7391304347826086,
|
| 6859 |
+
"mmlu_eval_accuracy_human_sexuality": 0.4166666666666667,
|
| 6860 |
+
"mmlu_eval_accuracy_international_law": 0.8461538461538461,
|
| 6861 |
+
"mmlu_eval_accuracy_jurisprudence": 0.36363636363636365,
|
| 6862 |
+
"mmlu_eval_accuracy_logical_fallacies": 0.6111111111111112,
|
| 6863 |
+
"mmlu_eval_accuracy_machine_learning": 0.09090909090909091,
|
| 6864 |
+
"mmlu_eval_accuracy_management": 0.7272727272727273,
|
| 6865 |
+
"mmlu_eval_accuracy_marketing": 0.76,
|
| 6866 |
+
"mmlu_eval_accuracy_medical_genetics": 0.9090909090909091,
|
| 6867 |
+
"mmlu_eval_accuracy_miscellaneous": 0.6395348837209303,
|
| 6868 |
+
"mmlu_eval_accuracy_moral_disputes": 0.5,
|
| 6869 |
+
"mmlu_eval_accuracy_moral_scenarios": 0.25,
|
| 6870 |
+
"mmlu_eval_accuracy_nutrition": 0.6060606060606061,
|
| 6871 |
+
"mmlu_eval_accuracy_philosophy": 0.4411764705882353,
|
| 6872 |
+
"mmlu_eval_accuracy_prehistory": 0.5142857142857142,
|
| 6873 |
+
"mmlu_eval_accuracy_professional_accounting": 0.3548387096774194,
|
| 6874 |
+
"mmlu_eval_accuracy_professional_law": 0.3176470588235294,
|
| 6875 |
+
"mmlu_eval_accuracy_professional_medicine": 0.6451612903225806,
|
| 6876 |
+
"mmlu_eval_accuracy_professional_psychology": 0.5072463768115942,
|
| 6877 |
+
"mmlu_eval_accuracy_public_relations": 0.5,
|
| 6878 |
+
"mmlu_eval_accuracy_security_studies": 0.48148148148148145,
|
| 6879 |
+
"mmlu_eval_accuracy_sociology": 0.6818181818181818,
|
| 6880 |
+
"mmlu_eval_accuracy_us_foreign_policy": 0.7272727272727273,
|
| 6881 |
+
"mmlu_eval_accuracy_virology": 0.5555555555555556,
|
| 6882 |
+
"mmlu_eval_accuracy_world_religions": 0.631578947368421,
|
| 6883 |
+
"mmlu_loss": 1.5382918150944747,
|
| 6884 |
+
"step": 7200
|
| 6885 |
+
},
|
| 6886 |
+
{
|
| 6887 |
+
"epoch": 2.28,
|
| 6888 |
+
"learning_rate": 0.0002,
|
| 6889 |
+
"loss": 0.5606,
|
| 6890 |
+
"step": 7210
|
| 6891 |
+
},
|
| 6892 |
+
{
|
| 6893 |
+
"epoch": 2.29,
|
| 6894 |
+
"learning_rate": 0.0002,
|
| 6895 |
+
"loss": 0.5737,
|
| 6896 |
+
"step": 7220
|
| 6897 |
+
},
|
| 6898 |
+
{
|
| 6899 |
+
"epoch": 2.29,
|
| 6900 |
+
"learning_rate": 0.0002,
|
| 6901 |
+
"loss": 0.6112,
|
| 6902 |
+
"step": 7230
|
| 6903 |
+
},
|
| 6904 |
+
{
|
| 6905 |
+
"epoch": 2.29,
|
| 6906 |
+
"learning_rate": 0.0002,
|
| 6907 |
+
"loss": 0.626,
|
| 6908 |
+
"step": 7240
|
| 6909 |
+
},
|
| 6910 |
+
{
|
| 6911 |
+
"epoch": 2.3,
|
| 6912 |
+
"learning_rate": 0.0002,
|
| 6913 |
+
"loss": 0.608,
|
| 6914 |
+
"step": 7250
|
| 6915 |
+
},
|
| 6916 |
+
{
|
| 6917 |
+
"epoch": 2.3,
|
| 6918 |
+
"learning_rate": 0.0002,
|
| 6919 |
+
"loss": 0.6265,
|
| 6920 |
+
"step": 7260
|
| 6921 |
+
},
|
| 6922 |
+
{
|
| 6923 |
+
"epoch": 2.3,
|
| 6924 |
+
"learning_rate": 0.0002,
|
| 6925 |
+
"loss": 0.6053,
|
| 6926 |
+
"step": 7270
|
| 6927 |
+
},
|
| 6928 |
+
{
|
| 6929 |
+
"epoch": 2.31,
|
| 6930 |
+
"learning_rate": 0.0002,
|
| 6931 |
+
"loss": 0.6135,
|
| 6932 |
+
"step": 7280
|
| 6933 |
+
},
|
| 6934 |
+
{
|
| 6935 |
+
"epoch": 2.31,
|
| 6936 |
+
"learning_rate": 0.0002,
|
| 6937 |
+
"loss": 0.5217,
|
| 6938 |
+
"step": 7290
|
| 6939 |
+
},
|
| 6940 |
+
{
|
| 6941 |
+
"epoch": 2.31,
|
| 6942 |
+
"learning_rate": 0.0002,
|
| 6943 |
+
"loss": 0.6124,
|
| 6944 |
+
"step": 7300
|
| 6945 |
+
},
|
| 6946 |
+
{
|
| 6947 |
+
"epoch": 2.32,
|
| 6948 |
+
"learning_rate": 0.0002,
|
| 6949 |
+
"loss": 0.5506,
|
| 6950 |
+
"step": 7310
|
| 6951 |
+
},
|
| 6952 |
+
{
|
| 6953 |
+
"epoch": 2.32,
|
| 6954 |
+
"learning_rate": 0.0002,
|
| 6955 |
+
"loss": 0.6095,
|
| 6956 |
+
"step": 7320
|
| 6957 |
+
},
|
| 6958 |
+
{
|
| 6959 |
+
"epoch": 2.32,
|
| 6960 |
+
"learning_rate": 0.0002,
|
| 6961 |
+
"loss": 0.5972,
|
| 6962 |
+
"step": 7330
|
| 6963 |
+
},
|
| 6964 |
+
{
|
| 6965 |
+
"epoch": 2.33,
|
| 6966 |
+
"learning_rate": 0.0002,
|
| 6967 |
+
"loss": 0.6714,
|
| 6968 |
+
"step": 7340
|
| 6969 |
+
},
|
| 6970 |
+
{
|
| 6971 |
+
"epoch": 2.33,
|
| 6972 |
+
"learning_rate": 0.0002,
|
| 6973 |
+
"loss": 0.6083,
|
| 6974 |
+
"step": 7350
|
| 6975 |
+
},
|
| 6976 |
+
{
|
| 6977 |
+
"epoch": 2.33,
|
| 6978 |
+
"learning_rate": 0.0002,
|
| 6979 |
+
"loss": 0.6033,
|
| 6980 |
+
"step": 7360
|
| 6981 |
+
},
|
| 6982 |
+
{
|
| 6983 |
+
"epoch": 2.34,
|
| 6984 |
+
"learning_rate": 0.0002,
|
| 6985 |
+
"loss": 0.5881,
|
| 6986 |
+
"step": 7370
|
| 6987 |
+
},
|
| 6988 |
+
{
|
| 6989 |
+
"epoch": 2.34,
|
| 6990 |
+
"learning_rate": 0.0002,
|
| 6991 |
+
"loss": 0.5958,
|
| 6992 |
+
"step": 7380
|
| 6993 |
+
},
|
| 6994 |
+
{
|
| 6995 |
+
"epoch": 2.34,
|
| 6996 |
+
"learning_rate": 0.0002,
|
| 6997 |
+
"loss": 0.6009,
|
| 6998 |
+
"step": 7390
|
| 6999 |
+
},
|
| 7000 |
+
{
|
| 7001 |
+
"epoch": 2.34,
|
| 7002 |
+
"learning_rate": 0.0002,
|
| 7003 |
+
"loss": 0.5608,
|
| 7004 |
+
"step": 7400
|
| 7005 |
+
},
|
| 7006 |
+
{
|
| 7007 |
+
"epoch": 2.34,
|
| 7008 |
+
"eval_loss": 0.767185628414154,
|
| 7009 |
+
"eval_runtime": 111.2161,
|
| 7010 |
+
"eval_samples_per_second": 8.992,
|
| 7011 |
+
"eval_steps_per_second": 4.496,
|
| 7012 |
+
"step": 7400
|
| 7013 |
+
},
|
| 7014 |
+
{
|
| 7015 |
+
"epoch": 2.34,
|
| 7016 |
+
"mmlu_eval_accuracy": 0.46046773240416866,
|
| 7017 |
+
"mmlu_eval_accuracy_abstract_algebra": 0.2727272727272727,
|
| 7018 |
+
"mmlu_eval_accuracy_anatomy": 0.6428571428571429,
|
| 7019 |
+
"mmlu_eval_accuracy_astronomy": 0.4375,
|
| 7020 |
+
"mmlu_eval_accuracy_business_ethics": 0.5454545454545454,
|
| 7021 |
+
"mmlu_eval_accuracy_clinical_knowledge": 0.4827586206896552,
|
| 7022 |
+
"mmlu_eval_accuracy_college_biology": 0.375,
|
| 7023 |
+
"mmlu_eval_accuracy_college_chemistry": 0.125,
|
| 7024 |
+
"mmlu_eval_accuracy_college_computer_science": 0.2727272727272727,
|
| 7025 |
+
"mmlu_eval_accuracy_college_mathematics": 0.09090909090909091,
|
| 7026 |
+
"mmlu_eval_accuracy_college_medicine": 0.36363636363636365,
|
| 7027 |
+
"mmlu_eval_accuracy_college_physics": 0.09090909090909091,
|
| 7028 |
+
"mmlu_eval_accuracy_computer_security": 0.36363636363636365,
|
| 7029 |
+
"mmlu_eval_accuracy_conceptual_physics": 0.4230769230769231,
|
| 7030 |
+
"mmlu_eval_accuracy_econometrics": 0.25,
|
| 7031 |
+
"mmlu_eval_accuracy_electrical_engineering": 0.25,
|
| 7032 |
+
"mmlu_eval_accuracy_elementary_mathematics": 0.2926829268292683,
|
| 7033 |
+
"mmlu_eval_accuracy_formal_logic": 0.14285714285714285,
|
| 7034 |
+
"mmlu_eval_accuracy_global_facts": 0.3,
|
| 7035 |
+
"mmlu_eval_accuracy_high_school_biology": 0.3125,
|
| 7036 |
+
"mmlu_eval_accuracy_high_school_chemistry": 0.13636363636363635,
|
| 7037 |
+
"mmlu_eval_accuracy_high_school_computer_science": 0.5555555555555556,
|
| 7038 |
+
"mmlu_eval_accuracy_high_school_european_history": 0.5555555555555556,
|
| 7039 |
+
"mmlu_eval_accuracy_high_school_geography": 0.8181818181818182,
|
| 7040 |
+
"mmlu_eval_accuracy_high_school_government_and_politics": 0.6666666666666666,
|
| 7041 |
+
"mmlu_eval_accuracy_high_school_macroeconomics": 0.46511627906976744,
|
| 7042 |
+
"mmlu_eval_accuracy_high_school_mathematics": 0.2413793103448276,
|
| 7043 |
+
"mmlu_eval_accuracy_high_school_microeconomics": 0.46153846153846156,
|
| 7044 |
+
"mmlu_eval_accuracy_high_school_physics": 0.0,
|
| 7045 |
+
"mmlu_eval_accuracy_high_school_psychology": 0.8666666666666667,
|
| 7046 |
+
"mmlu_eval_accuracy_high_school_statistics": 0.34782608695652173,
|
| 7047 |
+
"mmlu_eval_accuracy_high_school_us_history": 0.6363636363636364,
|
| 7048 |
+
"mmlu_eval_accuracy_high_school_world_history": 0.7307692307692307,
|
| 7049 |
+
"mmlu_eval_accuracy_human_aging": 0.782608695652174,
|
| 7050 |
+
"mmlu_eval_accuracy_human_sexuality": 0.4166666666666667,
|
| 7051 |
+
"mmlu_eval_accuracy_international_law": 0.9230769230769231,
|
| 7052 |
+
"mmlu_eval_accuracy_jurisprudence": 0.36363636363636365,
|
| 7053 |
+
"mmlu_eval_accuracy_logical_fallacies": 0.6111111111111112,
|
| 7054 |
+
"mmlu_eval_accuracy_machine_learning": 0.09090909090909091,
|
| 7055 |
+
"mmlu_eval_accuracy_management": 0.6363636363636364,
|
| 7056 |
+
"mmlu_eval_accuracy_marketing": 0.8,
|
| 7057 |
+
"mmlu_eval_accuracy_medical_genetics": 0.9090909090909091,
|
| 7058 |
+
"mmlu_eval_accuracy_miscellaneous": 0.627906976744186,
|
| 7059 |
+
"mmlu_eval_accuracy_moral_disputes": 0.5263157894736842,
|
| 7060 |
+
"mmlu_eval_accuracy_moral_scenarios": 0.27,
|
| 7061 |
+
"mmlu_eval_accuracy_nutrition": 0.5454545454545454,
|
| 7062 |
+
"mmlu_eval_accuracy_philosophy": 0.4117647058823529,
|
| 7063 |
+
"mmlu_eval_accuracy_prehistory": 0.5142857142857142,
|
| 7064 |
+
"mmlu_eval_accuracy_professional_accounting": 0.3548387096774194,
|
| 7065 |
+
"mmlu_eval_accuracy_professional_law": 0.3,
|
| 7066 |
+
"mmlu_eval_accuracy_professional_medicine": 0.6451612903225806,
|
| 7067 |
+
"mmlu_eval_accuracy_professional_psychology": 0.4782608695652174,
|
| 7068 |
+
"mmlu_eval_accuracy_public_relations": 0.5833333333333334,
|
| 7069 |
+
"mmlu_eval_accuracy_security_studies": 0.4444444444444444,
|
| 7070 |
+
"mmlu_eval_accuracy_sociology": 0.7272727272727273,
|
| 7071 |
+
"mmlu_eval_accuracy_us_foreign_policy": 0.6363636363636364,
|
| 7072 |
+
"mmlu_eval_accuracy_virology": 0.5,
|
| 7073 |
+
"mmlu_eval_accuracy_world_religions": 0.631578947368421,
|
| 7074 |
+
"mmlu_loss": 1.5711101981040392,
|
| 7075 |
+
"step": 7400
|
| 7076 |
+
},
|
| 7077 |
+
{
|
| 7078 |
+
"epoch": 2.35,
|
| 7079 |
+
"learning_rate": 0.0002,
|
| 7080 |
+
"loss": 0.5974,
|
| 7081 |
+
"step": 7410
|
| 7082 |
+
},
|
| 7083 |
+
{
|
| 7084 |
+
"epoch": 2.35,
|
| 7085 |
+
"learning_rate": 0.0002,
|
| 7086 |
+
"loss": 0.5677,
|
| 7087 |
+
"step": 7420
|
| 7088 |
+
},
|
| 7089 |
+
{
|
| 7090 |
+
"epoch": 2.35,
|
| 7091 |
+
"learning_rate": 0.0002,
|
| 7092 |
+
"loss": 0.5592,
|
| 7093 |
+
"step": 7430
|
| 7094 |
+
},
|
| 7095 |
+
{
|
| 7096 |
+
"epoch": 2.36,
|
| 7097 |
+
"learning_rate": 0.0002,
|
| 7098 |
+
"loss": 0.5754,
|
| 7099 |
+
"step": 7440
|
| 7100 |
+
},
|
| 7101 |
+
{
|
| 7102 |
+
"epoch": 2.36,
|
| 7103 |
+
"learning_rate": 0.0002,
|
| 7104 |
+
"loss": 0.6117,
|
| 7105 |
+
"step": 7450
|
| 7106 |
+
},
|
| 7107 |
+
{
|
| 7108 |
+
"epoch": 2.36,
|
| 7109 |
+
"learning_rate": 0.0002,
|
| 7110 |
+
"loss": 0.5462,
|
| 7111 |
+
"step": 7460
|
| 7112 |
+
},
|
| 7113 |
+
{
|
| 7114 |
+
"epoch": 2.37,
|
| 7115 |
+
"learning_rate": 0.0002,
|
| 7116 |
+
"loss": 0.5888,
|
| 7117 |
+
"step": 7470
|
| 7118 |
+
},
|
| 7119 |
+
{
|
| 7120 |
+
"epoch": 2.37,
|
| 7121 |
+
"learning_rate": 0.0002,
|
| 7122 |
+
"loss": 0.5933,
|
| 7123 |
+
"step": 7480
|
| 7124 |
+
},
|
| 7125 |
+
{
|
| 7126 |
+
"epoch": 2.37,
|
| 7127 |
+
"learning_rate": 0.0002,
|
| 7128 |
+
"loss": 0.6329,
|
| 7129 |
+
"step": 7490
|
| 7130 |
+
},
|
| 7131 |
+
{
|
| 7132 |
+
"epoch": 2.38,
|
| 7133 |
+
"learning_rate": 0.0002,
|
| 7134 |
+
"loss": 0.6803,
|
| 7135 |
+
"step": 7500
|
| 7136 |
+
},
|
| 7137 |
+
{
|
| 7138 |
+
"epoch": 2.38,
|
| 7139 |
+
"learning_rate": 0.0002,
|
| 7140 |
+
"loss": 0.5907,
|
| 7141 |
+
"step": 7510
|
| 7142 |
+
},
|
| 7143 |
+
{
|
| 7144 |
+
"epoch": 2.38,
|
| 7145 |
+
"learning_rate": 0.0002,
|
| 7146 |
+
"loss": 0.5929,
|
| 7147 |
+
"step": 7520
|
| 7148 |
+
},
|
| 7149 |
+
{
|
| 7150 |
+
"epoch": 2.39,
|
| 7151 |
+
"learning_rate": 0.0002,
|
| 7152 |
+
"loss": 0.6288,
|
| 7153 |
+
"step": 7530
|
| 7154 |
+
},
|
| 7155 |
+
{
|
| 7156 |
+
"epoch": 2.39,
|
| 7157 |
+
"learning_rate": 0.0002,
|
| 7158 |
+
"loss": 0.5839,
|
| 7159 |
+
"step": 7540
|
| 7160 |
+
},
|
| 7161 |
+
{
|
| 7162 |
+
"epoch": 2.39,
|
| 7163 |
+
"learning_rate": 0.0002,
|
| 7164 |
+
"loss": 0.5886,
|
| 7165 |
+
"step": 7550
|
| 7166 |
+
},
|
| 7167 |
+
{
|
| 7168 |
+
"epoch": 2.4,
|
| 7169 |
+
"learning_rate": 0.0002,
|
| 7170 |
+
"loss": 0.6225,
|
| 7171 |
+
"step": 7560
|
| 7172 |
+
},
|
| 7173 |
+
{
|
| 7174 |
+
"epoch": 2.4,
|
| 7175 |
+
"learning_rate": 0.0002,
|
| 7176 |
+
"loss": 0.6009,
|
| 7177 |
+
"step": 7570
|
| 7178 |
+
},
|
| 7179 |
+
{
|
| 7180 |
+
"epoch": 2.4,
|
| 7181 |
+
"learning_rate": 0.0002,
|
| 7182 |
+
"loss": 0.5975,
|
| 7183 |
+
"step": 7580
|
| 7184 |
+
},
|
| 7185 |
+
{
|
| 7186 |
+
"epoch": 2.4,
|
| 7187 |
+
"learning_rate": 0.0002,
|
| 7188 |
+
"loss": 0.5581,
|
| 7189 |
+
"step": 7590
|
| 7190 |
+
},
|
| 7191 |
+
{
|
| 7192 |
+
"epoch": 2.41,
|
| 7193 |
+
"learning_rate": 0.0002,
|
| 7194 |
+
"loss": 0.612,
|
| 7195 |
+
"step": 7600
|
| 7196 |
+
},
|
| 7197 |
+
{
|
| 7198 |
+
"epoch": 2.41,
|
| 7199 |
+
"eval_loss": 0.76031494140625,
|
| 7200 |
+
"eval_runtime": 111.0399,
|
| 7201 |
+
"eval_samples_per_second": 9.006,
|
| 7202 |
+
"eval_steps_per_second": 4.503,
|
| 7203 |
+
"step": 7600
|
| 7204 |
+
},
|
| 7205 |
+
{
|
| 7206 |
+
"epoch": 2.41,
|
| 7207 |
+
"mmlu_eval_accuracy": 0.47951118911559576,
|
| 7208 |
+
"mmlu_eval_accuracy_abstract_algebra": 0.36363636363636365,
|
| 7209 |
+
"mmlu_eval_accuracy_anatomy": 0.5714285714285714,
|
| 7210 |
+
"mmlu_eval_accuracy_astronomy": 0.5,
|
| 7211 |
+
"mmlu_eval_accuracy_business_ethics": 0.45454545454545453,
|
| 7212 |
+
"mmlu_eval_accuracy_clinical_knowledge": 0.4827586206896552,
|
| 7213 |
+
"mmlu_eval_accuracy_college_biology": 0.4375,
|
| 7214 |
+
"mmlu_eval_accuracy_college_chemistry": 0.375,
|
| 7215 |
+
"mmlu_eval_accuracy_college_computer_science": 0.36363636363636365,
|
| 7216 |
+
"mmlu_eval_accuracy_college_mathematics": 0.18181818181818182,
|
| 7217 |
+
"mmlu_eval_accuracy_college_medicine": 0.36363636363636365,
|
| 7218 |
+
"mmlu_eval_accuracy_college_physics": 0.18181818181818182,
|
| 7219 |
+
"mmlu_eval_accuracy_computer_security": 0.45454545454545453,
|
| 7220 |
+
"mmlu_eval_accuracy_conceptual_physics": 0.4230769230769231,
|
| 7221 |
+
"mmlu_eval_accuracy_econometrics": 0.25,
|
| 7222 |
+
"mmlu_eval_accuracy_electrical_engineering": 0.25,
|
| 7223 |
+
"mmlu_eval_accuracy_elementary_mathematics": 0.34146341463414637,
|
| 7224 |
+
"mmlu_eval_accuracy_formal_logic": 0.07142857142857142,
|
| 7225 |
+
"mmlu_eval_accuracy_global_facts": 0.5,
|
| 7226 |
+
"mmlu_eval_accuracy_high_school_biology": 0.40625,
|
| 7227 |
+
"mmlu_eval_accuracy_high_school_chemistry": 0.22727272727272727,
|
| 7228 |
+
"mmlu_eval_accuracy_high_school_computer_science": 0.5555555555555556,
|
| 7229 |
+
"mmlu_eval_accuracy_high_school_european_history": 0.6666666666666666,
|
| 7230 |
+
"mmlu_eval_accuracy_high_school_geography": 0.8636363636363636,
|
| 7231 |
+
"mmlu_eval_accuracy_high_school_government_and_politics": 0.6190476190476191,
|
| 7232 |
+
"mmlu_eval_accuracy_high_school_macroeconomics": 0.4418604651162791,
|
| 7233 |
+
"mmlu_eval_accuracy_high_school_mathematics": 0.1724137931034483,
|
| 7234 |
+
"mmlu_eval_accuracy_high_school_microeconomics": 0.46153846153846156,
|
| 7235 |
+
"mmlu_eval_accuracy_high_school_physics": 0.0,
|
| 7236 |
+
"mmlu_eval_accuracy_high_school_psychology": 0.8833333333333333,
|
| 7237 |
+
"mmlu_eval_accuracy_high_school_statistics": 0.21739130434782608,
|
| 7238 |
+
"mmlu_eval_accuracy_high_school_us_history": 0.6363636363636364,
|
| 7239 |
+
"mmlu_eval_accuracy_high_school_world_history": 0.6923076923076923,
|
| 7240 |
+
"mmlu_eval_accuracy_human_aging": 0.782608695652174,
|
| 7241 |
+
"mmlu_eval_accuracy_human_sexuality": 0.4166666666666667,
|
| 7242 |
+
"mmlu_eval_accuracy_international_law": 0.9230769230769231,
|
| 7243 |
+
"mmlu_eval_accuracy_jurisprudence": 0.36363636363636365,
|
| 7244 |
+
"mmlu_eval_accuracy_logical_fallacies": 0.6111111111111112,
|
| 7245 |
+
"mmlu_eval_accuracy_machine_learning": 0.09090909090909091,
|
| 7246 |
+
"mmlu_eval_accuracy_management": 0.6363636363636364,
|
| 7247 |
+
"mmlu_eval_accuracy_marketing": 0.8,
|
| 7248 |
+
"mmlu_eval_accuracy_medical_genetics": 0.9090909090909091,
|
| 7249 |
+
"mmlu_eval_accuracy_miscellaneous": 0.6395348837209303,
|
| 7250 |
+
"mmlu_eval_accuracy_moral_disputes": 0.4473684210526316,
|
| 7251 |
+
"mmlu_eval_accuracy_moral_scenarios": 0.24,
|
| 7252 |
+
"mmlu_eval_accuracy_nutrition": 0.6666666666666666,
|
| 7253 |
+
"mmlu_eval_accuracy_philosophy": 0.5588235294117647,
|
| 7254 |
+
"mmlu_eval_accuracy_prehistory": 0.4857142857142857,
|
| 7255 |
+
"mmlu_eval_accuracy_professional_accounting": 0.3225806451612903,
|
| 7256 |
+
"mmlu_eval_accuracy_professional_law": 0.3352941176470588,
|
| 7257 |
+
"mmlu_eval_accuracy_professional_medicine": 0.5806451612903226,
|
| 7258 |
+
"mmlu_eval_accuracy_professional_psychology": 0.5217391304347826,
|
| 7259 |
+
"mmlu_eval_accuracy_public_relations": 0.5,
|
| 7260 |
+
"mmlu_eval_accuracy_security_studies": 0.4444444444444444,
|
| 7261 |
+
"mmlu_eval_accuracy_sociology": 0.6818181818181818,
|
| 7262 |
+
"mmlu_eval_accuracy_us_foreign_policy": 0.7272727272727273,
|
| 7263 |
+
"mmlu_eval_accuracy_virology": 0.5,
|
| 7264 |
+
"mmlu_eval_accuracy_world_religions": 0.7368421052631579,
|
| 7265 |
+
"mmlu_loss": 1.584926052947891,
|
| 7266 |
+
"step": 7600
|
| 7267 |
}
|
| 7268 |
],
|
| 7269 |
"max_steps": 10000,
|
| 7270 |
"num_train_epochs": 4,
|
| 7271 |
+
"total_flos": 2.3037492480033915e+18,
|
| 7272 |
"trial_name": null,
|
| 7273 |
"trial_params": null
|
| 7274 |
}
|
{checkpoint-5400 → checkpoint-7600}/training_args.bin
RENAMED
File without changes
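
The entries in the diff above are appended to the `log_history` list inside `trainer_state.json`, with `max_steps`, `num_train_epochs`, and the added `total_flos` sitting beside that list at the top level. A minimal sketch for pulling the train/eval loss series out of a local copy of the checkpoint (the file path is an assumption for illustration, not part of this commit):

```python
import json

# Hypothetical local path to the checkpoint saved at step 7600.
with open("checkpoint-7600/trainer_state.json") as f:
    state = json.load(f)

# Every element of log_history is a flat dict: training-step entries carry
# "loss", while the periodic evaluations carry "eval_loss" and the
# per-subject "mmlu_eval_accuracy_*" keys seen in the diff above.
train = [(e["step"], e["loss"]) for e in state["log_history"] if "loss" in e]
evals = [(e["step"], e["eval_loss"]) for e in state["log_history"] if "eval_loss" in e]

print("last train loss:", train[-1])  # e.g. (7600, 0.612)
print("last eval loss:", evals[-1])   # e.g. (7600, 0.76031494140625)
```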