Farouk committed on
Commit · fb09852
1 Parent(s): d727b1c
Training in progress, step 8000
Browse files
- adapter_model.bin +1 -1
- {checkpoint-5800 → checkpoint-7800/adapter_model/adapter_model}/README.md +0 -0
- {checkpoint-5800 → checkpoint-7800/adapter_model/adapter_model}/adapter_config.json +0 -0
- {checkpoint-5800 → checkpoint-7800/adapter_model/adapter_model}/adapter_model.bin +1 -1
- checkpoint-8000/README.md +20 -0
- checkpoint-8000/adapter_config.json +26 -0
- checkpoint-8000/adapter_model.bin +3 -0
- {checkpoint-5800 → checkpoint-8000}/added_tokens.json +0 -0
- {checkpoint-5800 → checkpoint-8000}/optimizer.pt +1 -1
- {checkpoint-5800 → checkpoint-8000}/rng_state.pth +1 -1
- {checkpoint-5800 → checkpoint-8000}/scheduler.pt +1 -1
- {checkpoint-5800 → checkpoint-8000}/special_tokens_map.json +0 -0
- {checkpoint-5800 → checkpoint-8000}/tokenizer.model +0 -0
- {checkpoint-5800 → checkpoint-8000}/tokenizer_config.json +0 -0
- {checkpoint-5800 → checkpoint-8000}/trainer_state.json +2106 -5
- {checkpoint-5800 → checkpoint-8000}/training_args.bin +0 -0
adapter_model.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:846bcb30f899a406d4abb1951573ab860886a9e106adde967ccc5ec3bc709dc5
 size 319977229
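The `.bin` diffs in this commit are Git LFS spec-v1 pointer stubs rather than raw weights: a few `key value` text lines whose `oid` is the SHA-256 of the actual blob, so a one-hash change means the whole ~320 MB adapter file was replaced. As a minimal sketch (the function name and usage are illustrative, not part of this repo), such a pointer can be parsed like this:

```python
def parse_lfs_pointer(text: str) -> dict:
    """Parse a Git LFS spec-v1 pointer stub into its key/value fields.

    A pointer is a short text file of "key value" lines:
    version <spec-url> / oid sha256:<hash> / size <bytes>.
    """
    fields = {}
    for line in text.strip().splitlines():
        key, _, value = line.partition(" ")  # split on the first space only
        fields[key] = value
    return fields

# Example with the new adapter_model.bin pointer from this commit:
pointer = """version https://git-lfs.github.com/spec/v1
oid sha256:846bcb30f899a406d4abb1951573ab860886a9e106adde967ccc5ec3bc709dc5
size 319977229"""
print(parse_lfs_pointer(pointer)["size"])  # -> '319977229'
```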
{checkpoint-5800 → checkpoint-7800/adapter_model/adapter_model}/README.md RENAMED
File without changes

{checkpoint-5800 → checkpoint-7800/adapter_model/adapter_model}/adapter_config.json RENAMED
File without changes
{checkpoint-5800 → checkpoint-7800/adapter_model/adapter_model}/adapter_model.bin RENAMED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:339871e379ca33011c8e5834aeb3b52610bf9c7831c51c6909cfb8222a281fca
 size 319977229
checkpoint-8000/README.md ADDED
@@ -0,0 +1,20 @@
+---
+library_name: peft
+---
+## Training procedure
+
+
+The following `bitsandbytes` quantization config was used during training:
+- load_in_8bit: False
+- load_in_4bit: True
+- llm_int8_threshold: 6.0
+- llm_int8_skip_modules: None
+- llm_int8_enable_fp32_cpu_offload: False
+- llm_int8_has_fp16_weight: False
+- bnb_4bit_quant_type: nf4
+- bnb_4bit_use_double_quant: True
+- bnb_4bit_compute_dtype: bfloat16
+### Framework versions
+
+
+- PEFT 0.4.0
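The quantization settings recorded in the new README map one-to-one onto transformers' `BitsAndBytesConfig`. A minimal sketch of how this QLoRA-style setup is typically reconstructed at load time (the base model name comes from the adapter config below; treat this as an illustration under those assumptions, not the author's exact training script):

```python
import torch
from transformers import AutoModelForCausalLM, BitsAndBytesConfig

# Mirrors the bitsandbytes config recorded in checkpoint-8000/README.md.
bnb_config = BitsAndBytesConfig(
    load_in_4bit=True,               # load_in_8bit: False / load_in_4bit: True
    llm_int8_threshold=6.0,
    bnb_4bit_quant_type="nf4",       # NormalFloat4 quantization
    bnb_4bit_use_double_quant=True,  # also quantize the quantization constants
    bnb_4bit_compute_dtype=torch.bfloat16,
)

base_model = AutoModelForCausalLM.from_pretrained(
    "pankajmathur/orca_mini_v3_7b",
    quantization_config=bnb_config,
    device_map="auto",
)
```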
checkpoint-8000/adapter_config.json ADDED
@@ -0,0 +1,26 @@
+{
+  "auto_mapping": null,
+  "base_model_name_or_path": "pankajmathur/orca_mini_v3_7b",
+  "bias": "none",
+  "fan_in_fan_out": false,
+  "inference_mode": true,
+  "init_lora_weights": true,
+  "layers_pattern": null,
+  "layers_to_transform": null,
+  "lora_alpha": 16.0,
+  "lora_dropout": 0.1,
+  "modules_to_save": null,
+  "peft_type": "LORA",
+  "r": 64,
+  "revision": null,
+  "target_modules": [
+    "gate_proj",
+    "up_proj",
+    "k_proj",
+    "down_proj",
+    "o_proj",
+    "v_proj",
+    "q_proj"
+  ],
+  "task_type": "CAUSAL_LM"
+}
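This adapter config is a standard PEFT `LoraConfig` (rank 64, alpha 16, dropout 0.1, applied to all seven Llama projection matrices). A minimal sketch of attaching the checkpoint to the quantized base model with PEFT 0.4.0; the local `"checkpoint-8000"` path is an assumption about where this commit's files were downloaded:

```python
from peft import PeftModel

# base_model as constructed in the previous snippet; the directory must
# contain adapter_config.json and adapter_model.bin from this commit.
model = PeftModel.from_pretrained(base_model, "checkpoint-8000")
model.eval()  # inference_mode: true in the adapter config
```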
checkpoint-8000/adapter_model.bin ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:846bcb30f899a406d4abb1951573ab860886a9e106adde967ccc5ec3bc709dc5
+size 319977229
{checkpoint-5800 → checkpoint-8000}/added_tokens.json RENAMED
File without changes
{checkpoint-5800 → checkpoint-8000}/optimizer.pt RENAMED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:816ecc1544a3c3ddf97bd1d0e9e94459dd10e493458d2b36c8cff80cb69f6c6d
 size 1279539973
{checkpoint-5800 → checkpoint-8000}/rng_state.pth RENAMED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:8225086149f5f3d6f37661c9a006333a1477b877c6d7c56f548325ec418e797f
 size 14511
{checkpoint-5800 → checkpoint-8000}/scheduler.pt RENAMED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:ec0a535d2c9c4c62a74336a7f93b6d947a1152f53a6066eccd4123d6b477c15c
 size 627
{checkpoint-5800 → checkpoint-8000}/special_tokens_map.json RENAMED
File without changes

{checkpoint-5800 → checkpoint-8000}/tokenizer.model RENAMED
File without changes

{checkpoint-5800 → checkpoint-8000}/tokenizer_config.json RENAMED
File without changes
{checkpoint-5800 → checkpoint-8000}/trainer_state.json RENAMED
@@ -1,8 +1,8 @@
 {
-  "best_metric": 0.
-  "best_model_checkpoint": "experts/expert-3/checkpoint-
-  "epoch": 1.
-  "global_step": 
+  "best_metric": 0.4351891577243805,
+  "best_model_checkpoint": "experts/expert-3/checkpoint-7800",
+  "epoch": 1.579466929911155,
+  "global_step": 8000,
   "is_hyper_param_search": false,
   "is_local_process_zero": true,
   "is_world_process_zero": true,
@@ -5545,11 +5545,2112 @@
       "mmlu_eval_accuracy_world_religions": 0.6842105263157895,
       "mmlu_loss": 1.1274879405143678,
       "step": 5800
     }
   ],
   "max_steps": 10000,
   "num_train_epochs": 2,
-  "total_flos": 
   "trial_name": null,
   "trial_params": null
 }
+    },
+    {
+      "epoch": 1.15,
+      "learning_rate": 0.0002,
+      "loss": 0.3832,
+      "step": 5810
+    },
+    {
+      "epoch": 1.15,
+      "learning_rate": 0.0002,
+      "loss": 0.3735,
+      "step": 5820
+    },
+    {
+      "epoch": 1.15,
+      "learning_rate": 0.0002,
+      "loss": 0.3377,
+      "step": 5830
+    },
+    {
+      "epoch": 1.15,
+      "learning_rate": 0.0002,
+      "loss": 0.3043,
+      "step": 5840
+    },
+    {
+      "epoch": 1.15,
+      "learning_rate": 0.0002,
+      "loss": 0.4161,
+      "step": 5850
+    },
+    {
+      "epoch": 1.16,
+      "learning_rate": 0.0002,
+      "loss": 0.3896,
+      "step": 5860
+    },
+    {
+      "epoch": 1.16,
+      "learning_rate": 0.0002,
+      "loss": 0.3816,
+      "step": 5870
+    },
+    {
+      "epoch": 1.16,
+      "learning_rate": 0.0002,
+      "loss": 0.4081,
+      "step": 5880
+    },
+    {
+      "epoch": 1.16,
+      "learning_rate": 0.0002,
+      "loss": 0.393,
+      "step": 5890
+    },
+    {
+      "epoch": 1.16,
+      "learning_rate": 0.0002,
+      "loss": 0.3588,
+      "step": 5900
+    },
+    {
+      "epoch": 1.17,
+      "learning_rate": 0.0002,
+      "loss": 0.3957,
+      "step": 5910
+    },
+    {
+      "epoch": 1.17,
+      "learning_rate": 0.0002,
+      "loss": 0.3748,
+      "step": 5920
+    },
+    {
+      "epoch": 1.17,
+      "learning_rate": 0.0002,
+      "loss": 0.3642,
+      "step": 5930
+    },
+    {
+      "epoch": 1.17,
+      "learning_rate": 0.0002,
+      "loss": 0.417,
+      "step": 5940
+    },
+    {
+      "epoch": 1.17,
+      "learning_rate": 0.0002,
+      "loss": 0.3838,
+      "step": 5950
+    },
+    {
+      "epoch": 1.18,
+      "learning_rate": 0.0002,
+      "loss": 0.3685,
+      "step": 5960
+    },
+    {
+      "epoch": 1.18,
+      "learning_rate": 0.0002,
+      "loss": 0.3826,
+      "step": 5970
+    },
+    {
+      "epoch": 1.18,
+      "learning_rate": 0.0002,
+      "loss": 0.3439,
+      "step": 5980
+    },
+    {
+      "epoch": 1.18,
+      "learning_rate": 0.0002,
+      "loss": 0.3645,
+      "step": 5990
+    },
+    {
+      "epoch": 1.18,
+      "learning_rate": 0.0002,
+      "loss": 0.3679,
+      "step": 6000
+    },
+    {
+      "epoch": 1.18,
+      "eval_loss": 0.4468071758747101,
+      "eval_runtime": 120.9853,
+      "eval_samples_per_second": 8.265,
+      "eval_steps_per_second": 4.133,
+      "step": 6000
+    },
+    {
+      "epoch": 1.18,
+      "mmlu_eval_accuracy": 0.47902924053887225,
+      "mmlu_eval_accuracy_abstract_algebra": 0.18181818181818182,
+      "mmlu_eval_accuracy_anatomy": 0.5714285714285714,
+      "mmlu_eval_accuracy_astronomy": 0.5625,
+      "mmlu_eval_accuracy_business_ethics": 0.5454545454545454,
+      "mmlu_eval_accuracy_clinical_knowledge": 0.5172413793103449,
+      "mmlu_eval_accuracy_college_biology": 0.3125,
+      "mmlu_eval_accuracy_college_chemistry": 0.125,
+      "mmlu_eval_accuracy_college_computer_science": 0.2727272727272727,
+      "mmlu_eval_accuracy_college_mathematics": 0.36363636363636365,
+      "mmlu_eval_accuracy_college_medicine": 0.45454545454545453,
+      "mmlu_eval_accuracy_college_physics": 0.36363636363636365,
+      "mmlu_eval_accuracy_computer_security": 0.2727272727272727,
+      "mmlu_eval_accuracy_conceptual_physics": 0.34615384615384615,
+      "mmlu_eval_accuracy_econometrics": 0.16666666666666666,
+      "mmlu_eval_accuracy_electrical_engineering": 0.3125,
+      "mmlu_eval_accuracy_elementary_mathematics": 0.3902439024390244,
+      "mmlu_eval_accuracy_formal_logic": 0.14285714285714285,
+      "mmlu_eval_accuracy_global_facts": 0.3,
+      "mmlu_eval_accuracy_high_school_biology": 0.34375,
+      "mmlu_eval_accuracy_high_school_chemistry": 0.36363636363636365,
+      "mmlu_eval_accuracy_high_school_computer_science": 0.5555555555555556,
+      "mmlu_eval_accuracy_high_school_european_history": 0.7222222222222222,
+      "mmlu_eval_accuracy_high_school_geography": 0.8636363636363636,
+      "mmlu_eval_accuracy_high_school_government_and_politics": 0.6666666666666666,
+      "mmlu_eval_accuracy_high_school_macroeconomics": 0.4418604651162791,
+      "mmlu_eval_accuracy_high_school_mathematics": 0.2413793103448276,
+      "mmlu_eval_accuracy_high_school_microeconomics": 0.3076923076923077,
+      "mmlu_eval_accuracy_high_school_physics": 0.23529411764705882,
+      "mmlu_eval_accuracy_high_school_psychology": 0.8166666666666667,
+      "mmlu_eval_accuracy_high_school_statistics": 0.4782608695652174,
+      "mmlu_eval_accuracy_high_school_us_history": 0.6363636363636364,
+      "mmlu_eval_accuracy_high_school_world_history": 0.7307692307692307,
+      "mmlu_eval_accuracy_human_aging": 0.6521739130434783,
+      "mmlu_eval_accuracy_human_sexuality": 0.3333333333333333,
+      "mmlu_eval_accuracy_international_law": 0.8461538461538461,
+      "mmlu_eval_accuracy_jurisprudence": 0.45454545454545453,
+      "mmlu_eval_accuracy_logical_fallacies": 0.6111111111111112,
+      "mmlu_eval_accuracy_machine_learning": 0.18181818181818182,
+      "mmlu_eval_accuracy_management": 0.5454545454545454,
+      "mmlu_eval_accuracy_marketing": 0.8,
+      "mmlu_eval_accuracy_medical_genetics": 0.9090909090909091,
+      "mmlu_eval_accuracy_miscellaneous": 0.6511627906976745,
+      "mmlu_eval_accuracy_moral_disputes": 0.5,
+      "mmlu_eval_accuracy_moral_scenarios": 0.23,
+      "mmlu_eval_accuracy_nutrition": 0.6666666666666666,
+      "mmlu_eval_accuracy_philosophy": 0.47058823529411764,
+      "mmlu_eval_accuracy_prehistory": 0.4,
+      "mmlu_eval_accuracy_professional_accounting": 0.45161290322580644,
+      "mmlu_eval_accuracy_professional_law": 0.31176470588235294,
+      "mmlu_eval_accuracy_professional_medicine": 0.45161290322580644,
+      "mmlu_eval_accuracy_professional_psychology": 0.43478260869565216,
+      "mmlu_eval_accuracy_public_relations": 0.5833333333333334,
+      "mmlu_eval_accuracy_security_studies": 0.48148148148148145,
+      "mmlu_eval_accuracy_sociology": 0.7272727272727273,
+      "mmlu_eval_accuracy_us_foreign_policy": 0.8181818181818182,
+      "mmlu_eval_accuracy_virology": 0.5555555555555556,
+      "mmlu_eval_accuracy_world_religions": 0.631578947368421,
+      "mmlu_loss": 1.3537822210524164,
+      "step": 6000
+    },
+    {
+      "epoch": 1.19,
+      "learning_rate": 0.0002,
+      "loss": 0.3572,
+      "step": 6010
+    },
+    {
+      "epoch": 1.19,
+      "learning_rate": 0.0002,
+      "loss": 0.4093,
+      "step": 6020
+    },
+    {
+      "epoch": 1.19,
+      "learning_rate": 0.0002,
+      "loss": 0.4071,
+      "step": 6030
+    },
+    {
+      "epoch": 1.19,
+      "learning_rate": 0.0002,
+      "loss": 0.4245,
+      "step": 6040
+    },
+    {
+      "epoch": 1.19,
+      "learning_rate": 0.0002,
+      "loss": 0.3755,
+      "step": 6050
+    },
+    {
+      "epoch": 1.2,
+      "learning_rate": 0.0002,
+      "loss": 0.3693,
+      "step": 6060
+    },
+    {
+      "epoch": 1.2,
+      "learning_rate": 0.0002,
+      "loss": 0.3972,
+      "step": 6070
+    },
+    {
+      "epoch": 1.2,
+      "learning_rate": 0.0002,
+      "loss": 0.39,
+      "step": 6080
+    },
+    {
+      "epoch": 1.2,
+      "learning_rate": 0.0002,
+      "loss": 0.4284,
+      "step": 6090
+    },
+    {
+      "epoch": 1.2,
+      "learning_rate": 0.0002,
+      "loss": 0.3538,
+      "step": 6100
+    },
+    {
+      "epoch": 1.21,
+      "learning_rate": 0.0002,
+      "loss": 0.3566,
+      "step": 6110
+    },
+    {
+      "epoch": 1.21,
+      "learning_rate": 0.0002,
+      "loss": 0.3398,
+      "step": 6120
+    },
+    {
+      "epoch": 1.21,
+      "learning_rate": 0.0002,
+      "loss": 0.4341,
+      "step": 6130
+    },
+    {
+      "epoch": 1.21,
+      "learning_rate": 0.0002,
+      "loss": 0.3967,
+      "step": 6140
+    },
+    {
+      "epoch": 1.21,
+      "learning_rate": 0.0002,
+      "loss": 0.3533,
+      "step": 6150
+    },
+    {
+      "epoch": 1.22,
+      "learning_rate": 0.0002,
+      "loss": 0.3826,
+      "step": 6160
+    },
+    {
+      "epoch": 1.22,
+      "learning_rate": 0.0002,
+      "loss": 0.387,
+      "step": 6170
+    },
+    {
+      "epoch": 1.22,
+      "learning_rate": 0.0002,
+      "loss": 0.4174,
+      "step": 6180
+    },
+    {
+      "epoch": 1.22,
+      "learning_rate": 0.0002,
+      "loss": 0.4021,
+      "step": 6190
+    },
+    {
+      "epoch": 1.22,
+      "learning_rate": 0.0002,
+      "loss": 0.3649,
+      "step": 6200
+    },
+    {
+      "epoch": 1.22,
+      "eval_loss": 0.4450535178184509,
+      "eval_runtime": 120.974,
+      "eval_samples_per_second": 8.266,
+      "eval_steps_per_second": 4.133,
+      "step": 6200
+    },
+    {
+      "epoch": 1.22,
+      "mmlu_eval_accuracy": 0.49171108594535207,
+      "mmlu_eval_accuracy_abstract_algebra": 0.09090909090909091,
+      "mmlu_eval_accuracy_anatomy": 0.5714285714285714,
+      "mmlu_eval_accuracy_astronomy": 0.5,
+      "mmlu_eval_accuracy_business_ethics": 0.5454545454545454,
+      "mmlu_eval_accuracy_clinical_knowledge": 0.5517241379310345,
+      "mmlu_eval_accuracy_college_biology": 0.3125,
+      "mmlu_eval_accuracy_college_chemistry": 0.25,
+      "mmlu_eval_accuracy_college_computer_science": 0.2727272727272727,
+      "mmlu_eval_accuracy_college_mathematics": 0.36363636363636365,
+      "mmlu_eval_accuracy_college_medicine": 0.45454545454545453,
+      "mmlu_eval_accuracy_college_physics": 0.45454545454545453,
+      "mmlu_eval_accuracy_computer_security": 0.2727272727272727,
+      "mmlu_eval_accuracy_conceptual_physics": 0.38461538461538464,
+      "mmlu_eval_accuracy_econometrics": 0.16666666666666666,
+      "mmlu_eval_accuracy_electrical_engineering": 0.375,
+      "mmlu_eval_accuracy_elementary_mathematics": 0.34146341463414637,
+      "mmlu_eval_accuracy_formal_logic": 0.21428571428571427,
+      "mmlu_eval_accuracy_global_facts": 0.4,
+      "mmlu_eval_accuracy_high_school_biology": 0.40625,
+      "mmlu_eval_accuracy_high_school_chemistry": 0.3181818181818182,
+      "mmlu_eval_accuracy_high_school_computer_science": 0.5555555555555556,
+      "mmlu_eval_accuracy_high_school_european_history": 0.6666666666666666,
+      "mmlu_eval_accuracy_high_school_geography": 0.8181818181818182,
+      "mmlu_eval_accuracy_high_school_government_and_politics": 0.6190476190476191,
+      "mmlu_eval_accuracy_high_school_macroeconomics": 0.3953488372093023,
+      "mmlu_eval_accuracy_high_school_mathematics": 0.20689655172413793,
+      "mmlu_eval_accuracy_high_school_microeconomics": 0.46153846153846156,
+      "mmlu_eval_accuracy_high_school_physics": 0.29411764705882354,
+      "mmlu_eval_accuracy_high_school_psychology": 0.8333333333333334,
+      "mmlu_eval_accuracy_high_school_statistics": 0.43478260869565216,
+      "mmlu_eval_accuracy_high_school_us_history": 0.6363636363636364,
+      "mmlu_eval_accuracy_high_school_world_history": 0.7307692307692307,
+      "mmlu_eval_accuracy_human_aging": 0.6521739130434783,
+      "mmlu_eval_accuracy_human_sexuality": 0.3333333333333333,
+      "mmlu_eval_accuracy_international_law": 0.8461538461538461,
+      "mmlu_eval_accuracy_jurisprudence": 0.45454545454545453,
+      "mmlu_eval_accuracy_logical_fallacies": 0.6111111111111112,
+      "mmlu_eval_accuracy_machine_learning": 0.2727272727272727,
+      "mmlu_eval_accuracy_management": 0.6363636363636364,
+      "mmlu_eval_accuracy_marketing": 0.76,
+      "mmlu_eval_accuracy_medical_genetics": 0.9090909090909091,
+      "mmlu_eval_accuracy_miscellaneous": 0.6511627906976745,
+      "mmlu_eval_accuracy_moral_disputes": 0.47368421052631576,
+      "mmlu_eval_accuracy_moral_scenarios": 0.26,
+      "mmlu_eval_accuracy_nutrition": 0.5757575757575758,
+      "mmlu_eval_accuracy_philosophy": 0.5588235294117647,
+      "mmlu_eval_accuracy_prehistory": 0.45714285714285713,
+      "mmlu_eval_accuracy_professional_accounting": 0.4838709677419355,
+      "mmlu_eval_accuracy_professional_law": 0.3588235294117647,
+      "mmlu_eval_accuracy_professional_medicine": 0.4838709677419355,
+      "mmlu_eval_accuracy_professional_psychology": 0.43478260869565216,
+      "mmlu_eval_accuracy_public_relations": 0.6666666666666666,
+      "mmlu_eval_accuracy_security_studies": 0.5185185185185185,
+      "mmlu_eval_accuracy_sociology": 0.7272727272727273,
+      "mmlu_eval_accuracy_us_foreign_policy": 0.8181818181818182,
+      "mmlu_eval_accuracy_virology": 0.5,
+      "mmlu_eval_accuracy_world_religions": 0.6842105263157895,
+      "mmlu_loss": 1.1403136445554678,
+      "step": 6200
+    },
+    {
+      "epoch": 1.23,
+      "learning_rate": 0.0002,
+      "loss": 0.3599,
+      "step": 6210
+    },
+    {
+      "epoch": 1.23,
+      "learning_rate": 0.0002,
+      "loss": 0.3859,
+      "step": 6220
+    },
+    {
+      "epoch": 1.23,
+      "learning_rate": 0.0002,
+      "loss": 0.3573,
+      "step": 6230
+    },
+    {
+      "epoch": 1.23,
+      "learning_rate": 0.0002,
+      "loss": 0.3245,
+      "step": 6240
+    },
+    {
+      "epoch": 1.23,
+      "learning_rate": 0.0002,
+      "loss": 0.4243,
+      "step": 6250
+    },
+    {
+      "epoch": 1.24,
+      "learning_rate": 0.0002,
+      "loss": 0.3974,
+      "step": 6260
+    },
+    {
+      "epoch": 1.24,
+      "learning_rate": 0.0002,
+      "loss": 0.4257,
+      "step": 6270
+    },
+    {
+      "epoch": 1.24,
+      "learning_rate": 0.0002,
+      "loss": 0.3506,
+      "step": 6280
+    },
+    {
+      "epoch": 1.24,
+      "learning_rate": 0.0002,
+      "loss": 0.4198,
+      "step": 6290
+    },
+    {
+      "epoch": 1.24,
+      "learning_rate": 0.0002,
+      "loss": 0.3741,
+      "step": 6300
+    },
+    {
+      "epoch": 1.25,
+      "learning_rate": 0.0002,
+      "loss": 0.3465,
+      "step": 6310
+    },
+    {
+      "epoch": 1.25,
+      "learning_rate": 0.0002,
+      "loss": 0.3988,
+      "step": 6320
+    },
+    {
+      "epoch": 1.25,
+      "learning_rate": 0.0002,
+      "loss": 0.3865,
+      "step": 6330
+    },
+    {
+      "epoch": 1.25,
+      "learning_rate": 0.0002,
+      "loss": 0.3699,
+      "step": 6340
+    },
+    {
+      "epoch": 1.25,
+      "learning_rate": 0.0002,
+      "loss": 0.3522,
+      "step": 6350
+    },
+    {
+      "epoch": 1.26,
+      "learning_rate": 0.0002,
+      "loss": 0.4129,
+      "step": 6360
+    },
+    {
+      "epoch": 1.26,
+      "learning_rate": 0.0002,
+      "loss": 0.358,
+      "step": 6370
+    },
+    {
+      "epoch": 1.26,
+      "learning_rate": 0.0002,
+      "loss": 0.4189,
+      "step": 6380
+    },
+    {
+      "epoch": 1.26,
+      "learning_rate": 0.0002,
+      "loss": 0.399,
+      "step": 6390
+    },
+    {
+      "epoch": 1.26,
+      "learning_rate": 0.0002,
+      "loss": 0.3877,
+      "step": 6400
+    },
+    {
+      "epoch": 1.26,
+      "eval_loss": 0.4440629780292511,
+      "eval_runtime": 120.9414,
+      "eval_samples_per_second": 8.268,
+      "eval_steps_per_second": 4.134,
+      "step": 6400
+    },
+    {
+      "epoch": 1.26,
+      "mmlu_eval_accuracy": 0.4867509959392518,
+      "mmlu_eval_accuracy_abstract_algebra": 0.18181818181818182,
+      "mmlu_eval_accuracy_anatomy": 0.6428571428571429,
+      "mmlu_eval_accuracy_astronomy": 0.375,
+      "mmlu_eval_accuracy_business_ethics": 0.5454545454545454,
+      "mmlu_eval_accuracy_clinical_knowledge": 0.5517241379310345,
+      "mmlu_eval_accuracy_college_biology": 0.375,
+      "mmlu_eval_accuracy_college_chemistry": 0.25,
+      "mmlu_eval_accuracy_college_computer_science": 0.36363636363636365,
+      "mmlu_eval_accuracy_college_mathematics": 0.2727272727272727,
+      "mmlu_eval_accuracy_college_medicine": 0.4090909090909091,
+      "mmlu_eval_accuracy_college_physics": 0.2727272727272727,
+      "mmlu_eval_accuracy_computer_security": 0.45454545454545453,
+      "mmlu_eval_accuracy_conceptual_physics": 0.38461538461538464,
+      "mmlu_eval_accuracy_econometrics": 0.16666666666666666,
+      "mmlu_eval_accuracy_electrical_engineering": 0.1875,
+      "mmlu_eval_accuracy_elementary_mathematics": 0.34146341463414637,
+      "mmlu_eval_accuracy_formal_logic": 0.2857142857142857,
+      "mmlu_eval_accuracy_global_facts": 0.6,
+      "mmlu_eval_accuracy_high_school_biology": 0.40625,
+      "mmlu_eval_accuracy_high_school_chemistry": 0.2727272727272727,
+      "mmlu_eval_accuracy_high_school_computer_science": 0.5555555555555556,
+      "mmlu_eval_accuracy_high_school_european_history": 0.6111111111111112,
+      "mmlu_eval_accuracy_high_school_geography": 0.9090909090909091,
+      "mmlu_eval_accuracy_high_school_government_and_politics": 0.6190476190476191,
+      "mmlu_eval_accuracy_high_school_macroeconomics": 0.37209302325581395,
+      "mmlu_eval_accuracy_high_school_mathematics": 0.20689655172413793,
+      "mmlu_eval_accuracy_high_school_microeconomics": 0.46153846153846156,
+      "mmlu_eval_accuracy_high_school_physics": 0.29411764705882354,
+      "mmlu_eval_accuracy_high_school_psychology": 0.8166666666666667,
+      "mmlu_eval_accuracy_high_school_statistics": 0.43478260869565216,
+      "mmlu_eval_accuracy_high_school_us_history": 0.6818181818181818,
+      "mmlu_eval_accuracy_high_school_world_history": 0.6923076923076923,
+      "mmlu_eval_accuracy_human_aging": 0.6521739130434783,
+      "mmlu_eval_accuracy_human_sexuality": 0.3333333333333333,
+      "mmlu_eval_accuracy_international_law": 0.8461538461538461,
+      "mmlu_eval_accuracy_jurisprudence": 0.36363636363636365,
+      "mmlu_eval_accuracy_logical_fallacies": 0.6666666666666666,
+      "mmlu_eval_accuracy_machine_learning": 0.36363636363636365,
+      "mmlu_eval_accuracy_management": 0.5454545454545454,
+      "mmlu_eval_accuracy_marketing": 0.72,
+      "mmlu_eval_accuracy_medical_genetics": 0.9090909090909091,
+      "mmlu_eval_accuracy_miscellaneous": 0.6511627906976745,
+      "mmlu_eval_accuracy_moral_disputes": 0.4473684210526316,
+      "mmlu_eval_accuracy_moral_scenarios": 0.3,
+      "mmlu_eval_accuracy_nutrition": 0.5757575757575758,
+      "mmlu_eval_accuracy_philosophy": 0.47058823529411764,
+      "mmlu_eval_accuracy_prehistory": 0.4857142857142857,
+      "mmlu_eval_accuracy_professional_accounting": 0.45161290322580644,
+      "mmlu_eval_accuracy_professional_law": 0.3352941176470588,
+      "mmlu_eval_accuracy_professional_medicine": 0.4838709677419355,
+      "mmlu_eval_accuracy_professional_psychology": 0.391304347826087,
+      "mmlu_eval_accuracy_public_relations": 0.5833333333333334,
+      "mmlu_eval_accuracy_security_studies": 0.4444444444444444,
+      "mmlu_eval_accuracy_sociology": 0.7272727272727273,
+      "mmlu_eval_accuracy_us_foreign_policy": 0.8181818181818182,
+      "mmlu_eval_accuracy_virology": 0.5,
+      "mmlu_eval_accuracy_world_religions": 0.6842105263157895,
+      "mmlu_loss": 1.0828519074863927,
+      "step": 6400
+    },
+    {
+      "epoch": 1.27,
+      "learning_rate": 0.0002,
+      "loss": 0.3989,
+      "step": 6410
+    },
+    {
+      "epoch": 1.27,
+      "learning_rate": 0.0002,
+      "loss": 0.3786,
+      "step": 6420
+    },
+    {
+      "epoch": 1.27,
+      "learning_rate": 0.0002,
+      "loss": 0.3326,
+      "step": 6430
+    },
+    {
+      "epoch": 1.27,
+      "learning_rate": 0.0002,
+      "loss": 0.3602,
+      "step": 6440
+    },
+    {
+      "epoch": 1.27,
+      "learning_rate": 0.0002,
+      "loss": 0.414,
+      "step": 6450
+    },
+    {
+      "epoch": 1.28,
+      "learning_rate": 0.0002,
+      "loss": 0.3756,
+      "step": 6460
+    },
+    {
+      "epoch": 1.28,
+      "learning_rate": 0.0002,
+      "loss": 0.3688,
+      "step": 6470
+    },
+    {
+      "epoch": 1.28,
+      "learning_rate": 0.0002,
+      "loss": 0.3732,
+      "step": 6480
+    },
+    {
+      "epoch": 1.28,
+      "learning_rate": 0.0002,
+      "loss": 0.3914,
+      "step": 6490
+    },
+    {
+      "epoch": 1.28,
+      "learning_rate": 0.0002,
+      "loss": 0.357,
+      "step": 6500
+    },
+    {
+      "epoch": 1.29,
+      "learning_rate": 0.0002,
+      "loss": 0.343,
+      "step": 6510
+    },
+    {
+      "epoch": 1.29,
+      "learning_rate": 0.0002,
+      "loss": 0.3942,
+      "step": 6520
+    },
+    {
+      "epoch": 1.29,
+      "learning_rate": 0.0002,
+      "loss": 0.373,
+      "step": 6530
+    },
+    {
+      "epoch": 1.29,
+      "learning_rate": 0.0002,
+      "loss": 0.3962,
+      "step": 6540
+    },
+    {
+      "epoch": 1.29,
+      "learning_rate": 0.0002,
+      "loss": 0.407,
+      "step": 6550
+    },
+    {
+      "epoch": 1.3,
+      "learning_rate": 0.0002,
+      "loss": 0.3611,
+      "step": 6560
+    },
+    {
+      "epoch": 1.3,
+      "learning_rate": 0.0002,
+      "loss": 0.3557,
+      "step": 6570
+    },
+    {
+      "epoch": 1.3,
+      "learning_rate": 0.0002,
+      "loss": 0.4006,
+      "step": 6580
+    },
+    {
+      "epoch": 1.3,
+      "learning_rate": 0.0002,
+      "loss": 0.3899,
+      "step": 6590
+    },
+    {
+      "epoch": 1.3,
+      "learning_rate": 0.0002,
+      "loss": 0.4637,
+      "step": 6600
+    },
+    {
+      "epoch": 1.3,
+      "eval_loss": 0.44296130537986755,
+      "eval_runtime": 120.9444,
+      "eval_samples_per_second": 8.268,
+      "eval_steps_per_second": 4.134,
+      "step": 6600
+    },
+    {
+      "epoch": 1.3,
+      "mmlu_eval_accuracy": 0.5059018732754555,
+      "mmlu_eval_accuracy_abstract_algebra": 0.2727272727272727,
+      "mmlu_eval_accuracy_anatomy": 0.7142857142857143,
+      "mmlu_eval_accuracy_astronomy": 0.375,
+      "mmlu_eval_accuracy_business_ethics": 0.5454545454545454,
+      "mmlu_eval_accuracy_clinical_knowledge": 0.5517241379310345,
+      "mmlu_eval_accuracy_college_biology": 0.3125,
+      "mmlu_eval_accuracy_college_chemistry": 0.25,
+      "mmlu_eval_accuracy_college_computer_science": 0.5454545454545454,
+      "mmlu_eval_accuracy_college_mathematics": 0.18181818181818182,
+      "mmlu_eval_accuracy_college_medicine": 0.5,
+      "mmlu_eval_accuracy_college_physics": 0.45454545454545453,
+      "mmlu_eval_accuracy_computer_security": 0.45454545454545453,
+      "mmlu_eval_accuracy_conceptual_physics": 0.46153846153846156,
+      "mmlu_eval_accuracy_econometrics": 0.16666666666666666,
+      "mmlu_eval_accuracy_electrical_engineering": 0.375,
+      "mmlu_eval_accuracy_elementary_mathematics": 0.36585365853658536,
+      "mmlu_eval_accuracy_formal_logic": 0.21428571428571427,
+      "mmlu_eval_accuracy_global_facts": 0.3,
+      "mmlu_eval_accuracy_high_school_biology": 0.4375,
+      "mmlu_eval_accuracy_high_school_chemistry": 0.5,
+      "mmlu_eval_accuracy_high_school_computer_science": 0.5555555555555556,
+      "mmlu_eval_accuracy_high_school_european_history": 0.7222222222222222,
+      "mmlu_eval_accuracy_high_school_geography": 0.9090909090909091,
+      "mmlu_eval_accuracy_high_school_government_and_politics": 0.6190476190476191,
+      "mmlu_eval_accuracy_high_school_macroeconomics": 0.3488372093023256,
+      "mmlu_eval_accuracy_high_school_mathematics": 0.20689655172413793,
+      "mmlu_eval_accuracy_high_school_microeconomics": 0.4230769230769231,
+      "mmlu_eval_accuracy_high_school_physics": 0.35294117647058826,
+      "mmlu_eval_accuracy_high_school_psychology": 0.8333333333333334,
+      "mmlu_eval_accuracy_high_school_statistics": 0.43478260869565216,
+      "mmlu_eval_accuracy_high_school_us_history": 0.6363636363636364,
+      "mmlu_eval_accuracy_high_school_world_history": 0.6923076923076923,
+      "mmlu_eval_accuracy_human_aging": 0.6521739130434783,
+      "mmlu_eval_accuracy_human_sexuality": 0.5,
+      "mmlu_eval_accuracy_international_law": 0.8461538461538461,
+      "mmlu_eval_accuracy_jurisprudence": 0.45454545454545453,
+      "mmlu_eval_accuracy_logical_fallacies": 0.6666666666666666,
+      "mmlu_eval_accuracy_machine_learning": 0.2727272727272727,
+      "mmlu_eval_accuracy_management": 0.6363636363636364,
+      "mmlu_eval_accuracy_marketing": 0.8,
+      "mmlu_eval_accuracy_medical_genetics": 1.0,
+      "mmlu_eval_accuracy_miscellaneous": 0.686046511627907,
+      "mmlu_eval_accuracy_moral_disputes": 0.42105263157894735,
+      "mmlu_eval_accuracy_moral_scenarios": 0.24,
+      "mmlu_eval_accuracy_nutrition": 0.6666666666666666,
+      "mmlu_eval_accuracy_philosophy": 0.5294117647058824,
+      "mmlu_eval_accuracy_prehistory": 0.45714285714285713,
+      "mmlu_eval_accuracy_professional_accounting": 0.3870967741935484,
+      "mmlu_eval_accuracy_professional_law": 0.3411764705882353,
+      "mmlu_eval_accuracy_professional_medicine": 0.4838709677419355,
+      "mmlu_eval_accuracy_professional_psychology": 0.43478260869565216,
+      "mmlu_eval_accuracy_public_relations": 0.5,
+      "mmlu_eval_accuracy_security_studies": 0.5185185185185185,
+      "mmlu_eval_accuracy_sociology": 0.7727272727272727,
+      "mmlu_eval_accuracy_us_foreign_policy": 0.7272727272727273,
+      "mmlu_eval_accuracy_virology": 0.4444444444444444,
+      "mmlu_eval_accuracy_world_religions": 0.6842105263157895,
+      "mmlu_loss": 1.178827084542566,
+      "step": 6600
+    },
+    {
+      "epoch": 1.31,
+      "learning_rate": 0.0002,
+      "loss": 0.3946,
+      "step": 6610
+    },
+    {
+      "epoch": 1.31,
+      "learning_rate": 0.0002,
+      "loss": 0.3958,
+      "step": 6620
+    },
+    {
+      "epoch": 1.31,
+      "learning_rate": 0.0002,
+      "loss": 0.3441,
+      "step": 6630
+    },
+    {
+      "epoch": 1.31,
+      "learning_rate": 0.0002,
+      "loss": 0.3368,
+      "step": 6640
+    },
+    {
+      "epoch": 1.31,
+      "learning_rate": 0.0002,
+      "loss": 0.3992,
+      "step": 6650
+    },
+    {
+      "epoch": 1.31,
+      "learning_rate": 0.0002,
+      "loss": 0.359,
+      "step": 6660
+    },
+    {
+      "epoch": 1.32,
+      "learning_rate": 0.0002,
+      "loss": 0.4192,
+      "step": 6670
+    },
+    {
+      "epoch": 1.32,
+      "learning_rate": 0.0002,
+      "loss": 0.3531,
+      "step": 6680
+    },
+    {
+      "epoch": 1.32,
+      "learning_rate": 0.0002,
+      "loss": 0.3698,
+      "step": 6690
+    },
+    {
+      "epoch": 1.32,
+      "learning_rate": 0.0002,
+      "loss": 0.4178,
+      "step": 6700
+    },
+    {
+      "epoch": 1.32,
+      "learning_rate": 0.0002,
+      "loss": 0.3839,
+      "step": 6710
+    },
+    {
+      "epoch": 1.33,
+      "learning_rate": 0.0002,
+      "loss": 0.3901,
+      "step": 6720
+    },
+    {
+      "epoch": 1.33,
+      "learning_rate": 0.0002,
+      "loss": 0.4016,
+      "step": 6730
+    },
+    {
+      "epoch": 1.33,
+      "learning_rate": 0.0002,
+      "loss": 0.4134,
+      "step": 6740
+    },
+    {
+      "epoch": 1.33,
+      "learning_rate": 0.0002,
+      "loss": 0.3701,
+      "step": 6750
+    },
+    {
+      "epoch": 1.33,
+      "learning_rate": 0.0002,
+      "loss": 0.3915,
+      "step": 6760
+    },
+    {
+      "epoch": 1.34,
+      "learning_rate": 0.0002,
+      "loss": 0.312,
+      "step": 6770
+    },
+    {
+      "epoch": 1.34,
+      "learning_rate": 0.0002,
+      "loss": 0.4279,
+      "step": 6780
+    },
+    {
+      "epoch": 1.34,
+      "learning_rate": 0.0002,
+      "loss": 0.4226,
+      "step": 6790
+    },
+    {
+      "epoch": 1.34,
+      "learning_rate": 0.0002,
+      "loss": 0.415,
+      "step": 6800
+    },
+    {
+      "epoch": 1.34,
+      "eval_loss": 0.44151541590690613,
+      "eval_runtime": 120.9028,
+      "eval_samples_per_second": 8.271,
+      "eval_steps_per_second": 4.136,
+      "step": 6800
+    },
+    {
+      "epoch": 1.34,
+      "mmlu_eval_accuracy": 0.5084804815045342,
+      "mmlu_eval_accuracy_abstract_algebra": 0.18181818181818182,
+      "mmlu_eval_accuracy_anatomy": 0.6428571428571429,
+      "mmlu_eval_accuracy_astronomy": 0.5,
+      "mmlu_eval_accuracy_business_ethics": 0.6363636363636364,
+      "mmlu_eval_accuracy_clinical_knowledge": 0.5517241379310345,
+      "mmlu_eval_accuracy_college_biology": 0.3125,
+      "mmlu_eval_accuracy_college_chemistry": 0.25,
+      "mmlu_eval_accuracy_college_computer_science": 0.45454545454545453,
+      "mmlu_eval_accuracy_college_mathematics": 0.18181818181818182,
+      "mmlu_eval_accuracy_college_medicine": 0.5,
+      "mmlu_eval_accuracy_college_physics": 0.45454545454545453,
+      "mmlu_eval_accuracy_computer_security": 0.2727272727272727,
+      "mmlu_eval_accuracy_conceptual_physics": 0.34615384615384615,
+      "mmlu_eval_accuracy_econometrics": 0.16666666666666666,
+      "mmlu_eval_accuracy_electrical_engineering": 0.375,
+      "mmlu_eval_accuracy_elementary_mathematics": 0.3170731707317073,
+      "mmlu_eval_accuracy_formal_logic": 0.07142857142857142,
+      "mmlu_eval_accuracy_global_facts": 0.4,
+      "mmlu_eval_accuracy_high_school_biology": 0.375,
+      "mmlu_eval_accuracy_high_school_chemistry": 0.45454545454545453,
+      "mmlu_eval_accuracy_high_school_computer_science": 0.5555555555555556,
+      "mmlu_eval_accuracy_high_school_european_history": 0.7777777777777778,
+      "mmlu_eval_accuracy_high_school_geography": 0.8636363636363636,
+      "mmlu_eval_accuracy_high_school_government_and_politics": 0.6190476190476191,
+      "mmlu_eval_accuracy_high_school_macroeconomics": 0.37209302325581395,
+      "mmlu_eval_accuracy_high_school_mathematics": 0.20689655172413793,
+      "mmlu_eval_accuracy_high_school_microeconomics": 0.5,
+      "mmlu_eval_accuracy_high_school_physics": 0.35294117647058826,
+      "mmlu_eval_accuracy_high_school_psychology": 0.8333333333333334,
+      "mmlu_eval_accuracy_high_school_statistics": 0.43478260869565216,
+      "mmlu_eval_accuracy_high_school_us_history": 0.6818181818181818,
+      "mmlu_eval_accuracy_high_school_world_history": 0.7307692307692307,
+      "mmlu_eval_accuracy_human_aging": 0.6521739130434783,
+      "mmlu_eval_accuracy_human_sexuality": 0.5,
+      "mmlu_eval_accuracy_international_law": 0.8461538461538461,
+      "mmlu_eval_accuracy_jurisprudence": 0.45454545454545453,
+      "mmlu_eval_accuracy_logical_fallacies": 0.6111111111111112,
+      "mmlu_eval_accuracy_machine_learning": 0.36363636363636365,
+      "mmlu_eval_accuracy_management": 0.7272727272727273,
+      "mmlu_eval_accuracy_marketing": 0.8,
+      "mmlu_eval_accuracy_medical_genetics": 1.0,
+      "mmlu_eval_accuracy_miscellaneous": 0.686046511627907,
+      "mmlu_eval_accuracy_moral_disputes": 0.47368421052631576,
+      "mmlu_eval_accuracy_moral_scenarios": 0.24,
+      "mmlu_eval_accuracy_nutrition": 0.6363636363636364,
+      "mmlu_eval_accuracy_philosophy": 0.5294117647058824,
+      "mmlu_eval_accuracy_prehistory": 0.42857142857142855,
+      "mmlu_eval_accuracy_professional_accounting": 0.45161290322580644,
+      "mmlu_eval_accuracy_professional_law": 0.3352941176470588,
+      "mmlu_eval_accuracy_professional_medicine": 0.4838709677419355,
+      "mmlu_eval_accuracy_professional_psychology": 0.4492753623188406,
+      "mmlu_eval_accuracy_public_relations": 0.5833333333333334,
+      "mmlu_eval_accuracy_security_studies": 0.48148148148148145,
+      "mmlu_eval_accuracy_sociology": 0.8181818181818182,
+      "mmlu_eval_accuracy_us_foreign_policy": 0.8181818181818182,
+      "mmlu_eval_accuracy_virology": 0.5555555555555556,
+      "mmlu_eval_accuracy_world_religions": 0.6842105263157895,
+      "mmlu_loss": 1.193184396266626,
+      "step": 6800
+    },
+    {
+      "epoch": 1.34,
+      "learning_rate": 0.0002,
+      "loss": 0.4017,
+      "step": 6810
+    },
+    {
+      "epoch": 1.35,
+      "learning_rate": 0.0002,
+      "loss": 0.3976,
+      "step": 6820
+    },
+    {
+      "epoch": 1.35,
+      "learning_rate": 0.0002,
+      "loss": 0.3451,
+      "step": 6830
+    },
+    {
+      "epoch": 1.35,
+      "learning_rate": 0.0002,
+      "loss": 0.3789,
+      "step": 6840
+    },
+    {
+      "epoch": 1.35,
+      "learning_rate": 0.0002,
+      "loss": 0.3654,
+      "step": 6850
+    },
+    {
+      "epoch": 1.35,
+      "learning_rate": 0.0002,
+      "loss": 0.4088,
+      "step": 6860
+    },
+    {
+      "epoch": 1.36,
+      "learning_rate": 0.0002,
+      "loss": 0.3614,
+      "step": 6870
+    },
+    {
+      "epoch": 1.36,
+      "learning_rate": 0.0002,
+      "loss": 0.4376,
+      "step": 6880
+    },
+    {
+      "epoch": 1.36,
+      "learning_rate": 0.0002,
+      "loss": 0.4113,
+      "step": 6890
+    },
+    {
+      "epoch": 1.36,
+      "learning_rate": 0.0002,
+      "loss": 0.384,
+      "step": 6900
+    },
+    {
+      "epoch": 1.36,
+      "learning_rate": 0.0002,
+      "loss": 0.3689,
+      "step": 6910
+    },
+    {
+      "epoch": 1.37,
+      "learning_rate": 0.0002,
+      "loss": 0.3565,
+      "step": 6920
+    },
+    {
+      "epoch": 1.37,
+      "learning_rate": 0.0002,
+      "loss": 0.3899,
+      "step": 6930
+    },
+    {
+      "epoch": 1.37,
+      "learning_rate": 0.0002,
+      "loss": 0.392,
+      "step": 6940
+    },
+    {
+      "epoch": 1.37,
+      "learning_rate": 0.0002,
+      "loss": 0.3805,
+      "step": 6950
+    },
+    {
+      "epoch": 1.37,
+      "learning_rate": 0.0002,
+      "loss": 0.3245,
+      "step": 6960
+    },
+    {
+      "epoch": 1.38,
+      "learning_rate": 0.0002,
+      "loss": 0.3815,
+      "step": 6970
+    },
+    {
+      "epoch": 1.38,
+      "learning_rate": 0.0002,
+      "loss": 0.353,
+      "step": 6980
+    },
+    {
+      "epoch": 1.38,
+      "learning_rate": 0.0002,
+      "loss": 0.3542,
+      "step": 6990
+    },
+    {
+      "epoch": 1.38,
+      "learning_rate": 0.0002,
+      "loss": 0.4175,
+      "step": 7000
+    },
+    {
+      "epoch": 1.38,
+      "eval_loss": 0.44231361150741577,
+      "eval_runtime": 120.8967,
+      "eval_samples_per_second": 8.272,
+      "eval_steps_per_second": 4.136,
+      "step": 7000
+    },
+    {
+      "epoch": 1.38,
+      "mmlu_eval_accuracy": 0.5054198185434623,
+      "mmlu_eval_accuracy_abstract_algebra": 0.2727272727272727,
+      "mmlu_eval_accuracy_anatomy": 0.7142857142857143,
+      "mmlu_eval_accuracy_astronomy": 0.625,
+      "mmlu_eval_accuracy_business_ethics": 0.6363636363636364,
|
| 6639 |
+
"mmlu_eval_accuracy_clinical_knowledge": 0.5172413793103449,
|
| 6640 |
+
"mmlu_eval_accuracy_college_biology": 0.3125,
|
| 6641 |
+
"mmlu_eval_accuracy_college_chemistry": 0.25,
|
| 6642 |
+
"mmlu_eval_accuracy_college_computer_science": 0.2727272727272727,
|
| 6643 |
+
"mmlu_eval_accuracy_college_mathematics": 0.2727272727272727,
|
| 6644 |
+
"mmlu_eval_accuracy_college_medicine": 0.5,
|
| 6645 |
+
"mmlu_eval_accuracy_college_physics": 0.36363636363636365,
|
| 6646 |
+
"mmlu_eval_accuracy_computer_security": 0.36363636363636365,
|
| 6647 |
+
"mmlu_eval_accuracy_conceptual_physics": 0.38461538461538464,
|
| 6648 |
+
"mmlu_eval_accuracy_econometrics": 0.16666666666666666,
|
| 6649 |
+
"mmlu_eval_accuracy_electrical_engineering": 0.375,
|
| 6650 |
+
"mmlu_eval_accuracy_elementary_mathematics": 0.43902439024390244,
|
| 6651 |
+
"mmlu_eval_accuracy_formal_logic": 0.14285714285714285,
|
| 6652 |
+
"mmlu_eval_accuracy_global_facts": 0.4,
|
| 6653 |
+
"mmlu_eval_accuracy_high_school_biology": 0.4375,
|
| 6654 |
+
"mmlu_eval_accuracy_high_school_chemistry": 0.4090909090909091,
|
| 6655 |
+
"mmlu_eval_accuracy_high_school_computer_science": 0.5555555555555556,
|
| 6656 |
+
"mmlu_eval_accuracy_high_school_european_history": 0.6666666666666666,
|
| 6657 |
+
"mmlu_eval_accuracy_high_school_geography": 0.8636363636363636,
|
| 6658 |
+
"mmlu_eval_accuracy_high_school_government_and_politics": 0.6190476190476191,
|
| 6659 |
+
"mmlu_eval_accuracy_high_school_macroeconomics": 0.3488372093023256,
|
| 6660 |
+
"mmlu_eval_accuracy_high_school_mathematics": 0.20689655172413793,
|
| 6661 |
+
"mmlu_eval_accuracy_high_school_microeconomics": 0.46153846153846156,
|
| 6662 |
+
"mmlu_eval_accuracy_high_school_physics": 0.35294117647058826,
|
| 6663 |
+
"mmlu_eval_accuracy_high_school_psychology": 0.8166666666666667,
|
| 6664 |
+
"mmlu_eval_accuracy_high_school_statistics": 0.391304347826087,
|
| 6665 |
+
"mmlu_eval_accuracy_high_school_us_history": 0.6818181818181818,
|
| 6666 |
+
"mmlu_eval_accuracy_high_school_world_history": 0.7307692307692307,
|
| 6667 |
+
"mmlu_eval_accuracy_human_aging": 0.6956521739130435,
|
| 6668 |
+
"mmlu_eval_accuracy_human_sexuality": 0.3333333333333333,
|
| 6669 |
+
"mmlu_eval_accuracy_international_law": 0.8461538461538461,
|
| 6670 |
+
"mmlu_eval_accuracy_jurisprudence": 0.45454545454545453,
|
| 6671 |
+
"mmlu_eval_accuracy_logical_fallacies": 0.6111111111111112,
|
| 6672 |
+
"mmlu_eval_accuracy_machine_learning": 0.2727272727272727,
|
| 6673 |
+
"mmlu_eval_accuracy_management": 0.7272727272727273,
|
| 6674 |
+
"mmlu_eval_accuracy_marketing": 0.84,
|
| 6675 |
+
"mmlu_eval_accuracy_medical_genetics": 1.0,
|
| 6676 |
+
"mmlu_eval_accuracy_miscellaneous": 0.6744186046511628,
|
| 6677 |
+
"mmlu_eval_accuracy_moral_disputes": 0.47368421052631576,
|
| 6678 |
+
"mmlu_eval_accuracy_moral_scenarios": 0.24,
|
| 6679 |
+
"mmlu_eval_accuracy_nutrition": 0.6060606060606061,
|
| 6680 |
+
"mmlu_eval_accuracy_philosophy": 0.5,
|
| 6681 |
+
"mmlu_eval_accuracy_prehistory": 0.42857142857142855,
|
| 6682 |
+
"mmlu_eval_accuracy_professional_accounting": 0.3870967741935484,
|
| 6683 |
+
"mmlu_eval_accuracy_professional_law": 0.32941176470588235,
|
| 6684 |
+
"mmlu_eval_accuracy_professional_medicine": 0.41935483870967744,
|
| 6685 |
+
"mmlu_eval_accuracy_professional_psychology": 0.43478260869565216,
|
| 6686 |
+
"mmlu_eval_accuracy_public_relations": 0.5833333333333334,
|
| 6687 |
+
"mmlu_eval_accuracy_security_studies": 0.48148148148148145,
|
| 6688 |
+
"mmlu_eval_accuracy_sociology": 0.7727272727272727,
|
| 6689 |
+
"mmlu_eval_accuracy_us_foreign_policy": 0.9090909090909091,
|
| 6690 |
+
"mmlu_eval_accuracy_virology": 0.5,
|
| 6691 |
+
"mmlu_eval_accuracy_world_religions": 0.7368421052631579,
|
| 6692 |
+
"mmlu_loss": 1.1865613444200072,
|
| 6693 |
+
"step": 7000
|
| 6694 |
+
},
|
| 6695 |
+
{
|
| 6696 |
+
"epoch": 1.38,
|
| 6697 |
+
"learning_rate": 0.0002,
|
| 6698 |
+
"loss": 0.4169,
|
| 6699 |
+
"step": 7010
|
| 6700 |
+
},
|
| 6701 |
+
{
|
| 6702 |
+
"epoch": 1.39,
|
| 6703 |
+
"learning_rate": 0.0002,
|
| 6704 |
+
"loss": 0.3836,
|
| 6705 |
+
"step": 7020
|
| 6706 |
+
},
|
| 6707 |
+
{
|
| 6708 |
+
"epoch": 1.39,
|
| 6709 |
+
"learning_rate": 0.0002,
|
| 6710 |
+
"loss": 0.3449,
|
| 6711 |
+
"step": 7030
|
| 6712 |
+
},
|
| 6713 |
+
{
|
| 6714 |
+
"epoch": 1.39,
|
| 6715 |
+
"learning_rate": 0.0002,
|
| 6716 |
+
"loss": 0.4059,
|
| 6717 |
+
"step": 7040
|
| 6718 |
+
},
|
| 6719 |
+
{
|
| 6720 |
+
"epoch": 1.39,
|
| 6721 |
+
"learning_rate": 0.0002,
|
| 6722 |
+
"loss": 0.3668,
|
| 6723 |
+
"step": 7050
|
| 6724 |
+
},
|
| 6725 |
+
{
|
| 6726 |
+
"epoch": 1.39,
|
| 6727 |
+
"learning_rate": 0.0002,
|
| 6728 |
+
"loss": 0.4043,
|
| 6729 |
+
"step": 7060
|
| 6730 |
+
},
|
| 6731 |
+
{
|
| 6732 |
+
"epoch": 1.4,
|
| 6733 |
+
"learning_rate": 0.0002,
|
| 6734 |
+
"loss": 0.3529,
|
| 6735 |
+
"step": 7070
|
| 6736 |
+
},
|
| 6737 |
+
{
|
| 6738 |
+
"epoch": 1.4,
|
| 6739 |
+
"learning_rate": 0.0002,
|
| 6740 |
+
"loss": 0.3659,
|
| 6741 |
+
"step": 7080
|
| 6742 |
+
},
|
| 6743 |
+
{
|
| 6744 |
+
"epoch": 1.4,
|
| 6745 |
+
"learning_rate": 0.0002,
|
| 6746 |
+
"loss": 0.4007,
|
| 6747 |
+
"step": 7090
|
| 6748 |
+
},
|
| 6749 |
+
{
|
| 6750 |
+
"epoch": 1.4,
|
| 6751 |
+
"learning_rate": 0.0002,
|
| 6752 |
+
"loss": 0.4162,
|
| 6753 |
+
"step": 7100
|
| 6754 |
+
},
|
| 6755 |
+
{
|
| 6756 |
+
"epoch": 1.4,
|
| 6757 |
+
"learning_rate": 0.0002,
|
| 6758 |
+
"loss": 0.3846,
|
| 6759 |
+
"step": 7110
|
| 6760 |
+
},
|
| 6761 |
+
{
|
| 6762 |
+
"epoch": 1.41,
|
| 6763 |
+
"learning_rate": 0.0002,
|
| 6764 |
+
"loss": 0.4277,
|
| 6765 |
+
"step": 7120
|
| 6766 |
+
},
|
| 6767 |
+
{
|
| 6768 |
+
"epoch": 1.41,
|
| 6769 |
+
"learning_rate": 0.0002,
|
| 6770 |
+
"loss": 0.4338,
|
| 6771 |
+
"step": 7130
|
| 6772 |
+
},
|
| 6773 |
+
{
|
| 6774 |
+
"epoch": 1.41,
|
| 6775 |
+
"learning_rate": 0.0002,
|
| 6776 |
+
"loss": 0.3412,
|
| 6777 |
+
"step": 7140
|
| 6778 |
+
},
|
| 6779 |
+
{
|
| 6780 |
+
"epoch": 1.41,
|
| 6781 |
+
"learning_rate": 0.0002,
|
| 6782 |
+
"loss": 0.4108,
|
| 6783 |
+
"step": 7150
|
| 6784 |
+
},
|
| 6785 |
+
{
|
| 6786 |
+
"epoch": 1.41,
|
| 6787 |
+
"learning_rate": 0.0002,
|
| 6788 |
+
"loss": 0.4078,
|
| 6789 |
+
"step": 7160
|
| 6790 |
+
},
|
| 6791 |
+
{
|
| 6792 |
+
"epoch": 1.42,
|
| 6793 |
+
"learning_rate": 0.0002,
|
| 6794 |
+
"loss": 0.3698,
|
| 6795 |
+
"step": 7170
|
| 6796 |
+
},
|
| 6797 |
+
{
|
| 6798 |
+
"epoch": 1.42,
|
| 6799 |
+
"learning_rate": 0.0002,
|
| 6800 |
+
"loss": 0.4155,
|
| 6801 |
+
"step": 7180
|
| 6802 |
+
},
|
| 6803 |
+
{
|
| 6804 |
+
"epoch": 1.42,
|
| 6805 |
+
"learning_rate": 0.0002,
|
| 6806 |
+
"loss": 0.3653,
|
| 6807 |
+
"step": 7190
|
| 6808 |
+
},
|
| 6809 |
+
{
|
| 6810 |
+
"epoch": 1.42,
|
| 6811 |
+
"learning_rate": 0.0002,
|
| 6812 |
+
"loss": 0.3598,
|
| 6813 |
+
"step": 7200
|
| 6814 |
+
},
|
| 6815 |
+
{
|
| 6816 |
+
"epoch": 1.42,
|
| 6817 |
+
"eval_loss": 0.43925899267196655,
|
| 6818 |
+
"eval_runtime": 120.9998,
|
| 6819 |
+
"eval_samples_per_second": 8.264,
|
| 6820 |
+
"eval_steps_per_second": 4.132,
|
| 6821 |
+
"step": 7200
|
| 6822 |
+
},
|
| 6823 |
+
{
|
| 6824 |
+
"epoch": 1.42,
|
| 6825 |
+
"mmlu_eval_accuracy": 0.48394951792577023,
|
| 6826 |
+
"mmlu_eval_accuracy_abstract_algebra": 0.2727272727272727,
|
| 6827 |
+
"mmlu_eval_accuracy_anatomy": 0.6428571428571429,
|
| 6828 |
+
"mmlu_eval_accuracy_astronomy": 0.5625,
|
| 6829 |
+
"mmlu_eval_accuracy_business_ethics": 0.6363636363636364,
|
| 6830 |
+
"mmlu_eval_accuracy_clinical_knowledge": 0.5862068965517241,
|
| 6831 |
+
"mmlu_eval_accuracy_college_biology": 0.375,
|
| 6832 |
+
"mmlu_eval_accuracy_college_chemistry": 0.25,
|
| 6833 |
+
"mmlu_eval_accuracy_college_computer_science": 0.36363636363636365,
|
| 6834 |
+
"mmlu_eval_accuracy_college_mathematics": 0.36363636363636365,
|
| 6835 |
+
"mmlu_eval_accuracy_college_medicine": 0.45454545454545453,
|
| 6836 |
+
"mmlu_eval_accuracy_college_physics": 0.36363636363636365,
|
| 6837 |
+
"mmlu_eval_accuracy_computer_security": 0.2727272727272727,
|
| 6838 |
+
"mmlu_eval_accuracy_conceptual_physics": 0.34615384615384615,
|
| 6839 |
+
"mmlu_eval_accuracy_econometrics": 0.16666666666666666,
|
| 6840 |
+
"mmlu_eval_accuracy_electrical_engineering": 0.375,
|
| 6841 |
+
"mmlu_eval_accuracy_elementary_mathematics": 0.36585365853658536,
|
| 6842 |
+
"mmlu_eval_accuracy_formal_logic": 0.21428571428571427,
|
| 6843 |
+
"mmlu_eval_accuracy_global_facts": 0.3,
|
| 6844 |
+
"mmlu_eval_accuracy_high_school_biology": 0.40625,
|
| 6845 |
+
"mmlu_eval_accuracy_high_school_chemistry": 0.3181818181818182,
|
| 6846 |
+
"mmlu_eval_accuracy_high_school_computer_science": 0.5555555555555556,
|
| 6847 |
+
"mmlu_eval_accuracy_high_school_european_history": 0.5555555555555556,
|
| 6848 |
+
"mmlu_eval_accuracy_high_school_geography": 0.8636363636363636,
|
| 6849 |
+
"mmlu_eval_accuracy_high_school_government_and_politics": 0.6190476190476191,
|
| 6850 |
+
"mmlu_eval_accuracy_high_school_macroeconomics": 0.3488372093023256,
|
| 6851 |
+
"mmlu_eval_accuracy_high_school_mathematics": 0.20689655172413793,
|
| 6852 |
+
"mmlu_eval_accuracy_high_school_microeconomics": 0.3076923076923077,
|
| 6853 |
+
"mmlu_eval_accuracy_high_school_physics": 0.29411764705882354,
|
| 6854 |
+
"mmlu_eval_accuracy_high_school_psychology": 0.8333333333333334,
|
| 6855 |
+
"mmlu_eval_accuracy_high_school_statistics": 0.34782608695652173,
|
| 6856 |
+
"mmlu_eval_accuracy_high_school_us_history": 0.5909090909090909,
|
| 6857 |
+
"mmlu_eval_accuracy_high_school_world_history": 0.7307692307692307,
|
| 6858 |
+
"mmlu_eval_accuracy_human_aging": 0.6956521739130435,
|
| 6859 |
+
"mmlu_eval_accuracy_human_sexuality": 0.3333333333333333,
|
| 6860 |
+
"mmlu_eval_accuracy_international_law": 0.8461538461538461,
|
| 6861 |
+
"mmlu_eval_accuracy_jurisprudence": 0.36363636363636365,
|
| 6862 |
+
"mmlu_eval_accuracy_logical_fallacies": 0.6111111111111112,
|
| 6863 |
+
"mmlu_eval_accuracy_machine_learning": 0.18181818181818182,
|
| 6864 |
+
"mmlu_eval_accuracy_management": 0.6363636363636364,
|
| 6865 |
+
"mmlu_eval_accuracy_marketing": 0.8,
|
| 6866 |
+
"mmlu_eval_accuracy_medical_genetics": 0.9090909090909091,
|
| 6867 |
+
"mmlu_eval_accuracy_miscellaneous": 0.6744186046511628,
|
| 6868 |
+
"mmlu_eval_accuracy_moral_disputes": 0.5263157894736842,
|
| 6869 |
+
"mmlu_eval_accuracy_moral_scenarios": 0.26,
|
| 6870 |
+
"mmlu_eval_accuracy_nutrition": 0.5454545454545454,
|
| 6871 |
+
"mmlu_eval_accuracy_philosophy": 0.5,
|
| 6872 |
+
"mmlu_eval_accuracy_prehistory": 0.42857142857142855,
|
| 6873 |
+
"mmlu_eval_accuracy_professional_accounting": 0.45161290322580644,
|
| 6874 |
+
"mmlu_eval_accuracy_professional_law": 0.35294117647058826,
|
| 6875 |
+
"mmlu_eval_accuracy_professional_medicine": 0.41935483870967744,
|
| 6876 |
+
"mmlu_eval_accuracy_professional_psychology": 0.4927536231884058,
|
| 6877 |
+
"mmlu_eval_accuracy_public_relations": 0.6666666666666666,
|
| 6878 |
+
"mmlu_eval_accuracy_security_studies": 0.4444444444444444,
|
| 6879 |
+
"mmlu_eval_accuracy_sociology": 0.7727272727272727,
|
| 6880 |
+
"mmlu_eval_accuracy_us_foreign_policy": 0.5454545454545454,
|
| 6881 |
+
"mmlu_eval_accuracy_virology": 0.5,
|
| 6882 |
+
"mmlu_eval_accuracy_world_religions": 0.7368421052631579,
|
| 6883 |
+
"mmlu_loss": 1.127036721486958,
|
| 6884 |
+
"step": 7200
|
| 6885 |
+
},
|
| 6886 |
+
{
|
| 6887 |
+
"epoch": 1.42,
|
| 6888 |
+
"learning_rate": 0.0002,
|
| 6889 |
+
"loss": 0.3588,
|
| 6890 |
+
"step": 7210
|
| 6891 |
+
},
|
| 6892 |
+
{
|
| 6893 |
+
"epoch": 1.43,
|
| 6894 |
+
"learning_rate": 0.0002,
|
| 6895 |
+
"loss": 0.3902,
|
| 6896 |
+
"step": 7220
|
| 6897 |
+
},
|
| 6898 |
+
{
|
| 6899 |
+
"epoch": 1.43,
|
| 6900 |
+
"learning_rate": 0.0002,
|
| 6901 |
+
"loss": 0.3806,
|
| 6902 |
+
"step": 7230
|
| 6903 |
+
},
|
| 6904 |
+
{
|
| 6905 |
+
"epoch": 1.43,
|
| 6906 |
+
"learning_rate": 0.0002,
|
| 6907 |
+
"loss": 0.3985,
|
| 6908 |
+
"step": 7240
|
| 6909 |
+
},
|
| 6910 |
+
{
|
| 6911 |
+
"epoch": 1.43,
|
| 6912 |
+
"learning_rate": 0.0002,
|
| 6913 |
+
"loss": 0.3945,
|
| 6914 |
+
"step": 7250
|
| 6915 |
+
},
|
| 6916 |
+
{
|
| 6917 |
+
"epoch": 1.43,
|
| 6918 |
+
"learning_rate": 0.0002,
|
| 6919 |
+
"loss": 0.4605,
|
| 6920 |
+
"step": 7260
|
| 6921 |
+
},
|
| 6922 |
+
{
|
| 6923 |
+
"epoch": 1.44,
|
| 6924 |
+
"learning_rate": 0.0002,
|
| 6925 |
+
"loss": 0.3761,
|
| 6926 |
+
"step": 7270
|
| 6927 |
+
},
|
| 6928 |
+
{
|
| 6929 |
+
"epoch": 1.44,
|
| 6930 |
+
"learning_rate": 0.0002,
|
| 6931 |
+
"loss": 0.3667,
|
| 6932 |
+
"step": 7280
|
| 6933 |
+
},
|
| 6934 |
+
{
|
| 6935 |
+
"epoch": 1.44,
|
| 6936 |
+
"learning_rate": 0.0002,
|
| 6937 |
+
"loss": 0.3682,
|
| 6938 |
+
"step": 7290
|
| 6939 |
+
},
|
| 6940 |
+
{
|
| 6941 |
+
"epoch": 1.44,
|
| 6942 |
+
"learning_rate": 0.0002,
|
| 6943 |
+
"loss": 0.3361,
|
| 6944 |
+
"step": 7300
|
| 6945 |
+
},
|
| 6946 |
+
{
|
| 6947 |
+
"epoch": 1.44,
|
| 6948 |
+
"learning_rate": 0.0002,
|
| 6949 |
+
"loss": 0.3685,
|
| 6950 |
+
"step": 7310
|
| 6951 |
+
},
|
| 6952 |
+
{
|
| 6953 |
+
"epoch": 1.45,
|
| 6954 |
+
"learning_rate": 0.0002,
|
| 6955 |
+
"loss": 0.3448,
|
| 6956 |
+
"step": 7320
|
| 6957 |
+
},
|
| 6958 |
+
{
|
| 6959 |
+
"epoch": 1.45,
|
| 6960 |
+
"learning_rate": 0.0002,
|
| 6961 |
+
"loss": 0.3498,
|
| 6962 |
+
"step": 7330
|
| 6963 |
+
},
|
| 6964 |
+
{
|
| 6965 |
+
"epoch": 1.45,
|
| 6966 |
+
"learning_rate": 0.0002,
|
| 6967 |
+
"loss": 0.3714,
|
| 6968 |
+
"step": 7340
|
| 6969 |
+
},
|
| 6970 |
+
{
|
| 6971 |
+
"epoch": 1.45,
|
| 6972 |
+
"learning_rate": 0.0002,
|
| 6973 |
+
"loss": 0.3915,
|
| 6974 |
+
"step": 7350
|
| 6975 |
+
},
|
| 6976 |
+
{
|
| 6977 |
+
"epoch": 1.45,
|
| 6978 |
+
"learning_rate": 0.0002,
|
| 6979 |
+
"loss": 0.3867,
|
| 6980 |
+
"step": 7360
|
| 6981 |
+
},
|
| 6982 |
+
{
|
| 6983 |
+
"epoch": 1.46,
|
| 6984 |
+
"learning_rate": 0.0002,
|
| 6985 |
+
"loss": 0.3838,
|
| 6986 |
+
"step": 7370
|
| 6987 |
+
},
|
| 6988 |
+
{
|
| 6989 |
+
"epoch": 1.46,
|
| 6990 |
+
"learning_rate": 0.0002,
|
| 6991 |
+
"loss": 0.3923,
|
| 6992 |
+
"step": 7380
|
| 6993 |
+
},
|
| 6994 |
+
{
|
| 6995 |
+
"epoch": 1.46,
|
| 6996 |
+
"learning_rate": 0.0002,
|
| 6997 |
+
"loss": 0.3739,
|
| 6998 |
+
"step": 7390
|
| 6999 |
+
},
|
| 7000 |
+
{
|
| 7001 |
+
"epoch": 1.46,
|
| 7002 |
+
"learning_rate": 0.0002,
|
| 7003 |
+
"loss": 0.4029,
|
| 7004 |
+
"step": 7400
|
| 7005 |
+
},
|
| 7006 |
+
{
|
| 7007 |
+
"epoch": 1.46,
|
| 7008 |
+
"eval_loss": 0.43981724977493286,
|
| 7009 |
+
"eval_runtime": 121.0098,
|
| 7010 |
+
"eval_samples_per_second": 8.264,
|
| 7011 |
+
"eval_steps_per_second": 4.132,
|
| 7012 |
+
"step": 7400
|
| 7013 |
+
},
|
| 7014 |
+
{
|
| 7015 |
+
"epoch": 1.46,
|
| 7016 |
+
"mmlu_eval_accuracy": 0.47466190250303497,
|
| 7017 |
+
"mmlu_eval_accuracy_abstract_algebra": 0.18181818181818182,
|
| 7018 |
+
"mmlu_eval_accuracy_anatomy": 0.6428571428571429,
|
| 7019 |
+
"mmlu_eval_accuracy_astronomy": 0.5625,
|
| 7020 |
+
"mmlu_eval_accuracy_business_ethics": 0.5454545454545454,
|
| 7021 |
+
"mmlu_eval_accuracy_clinical_knowledge": 0.5172413793103449,
|
| 7022 |
+
"mmlu_eval_accuracy_college_biology": 0.25,
|
| 7023 |
+
"mmlu_eval_accuracy_college_chemistry": 0.25,
|
| 7024 |
+
"mmlu_eval_accuracy_college_computer_science": 0.2727272727272727,
|
| 7025 |
+
"mmlu_eval_accuracy_college_mathematics": 0.36363636363636365,
|
| 7026 |
+
"mmlu_eval_accuracy_college_medicine": 0.4090909090909091,
|
| 7027 |
+
"mmlu_eval_accuracy_college_physics": 0.36363636363636365,
|
| 7028 |
+
"mmlu_eval_accuracy_computer_security": 0.36363636363636365,
|
| 7029 |
+
"mmlu_eval_accuracy_conceptual_physics": 0.38461538461538464,
|
| 7030 |
+
"mmlu_eval_accuracy_econometrics": 0.16666666666666666,
|
| 7031 |
+
"mmlu_eval_accuracy_electrical_engineering": 0.375,
|
| 7032 |
+
"mmlu_eval_accuracy_elementary_mathematics": 0.36585365853658536,
|
| 7033 |
+
"mmlu_eval_accuracy_formal_logic": 0.21428571428571427,
|
| 7034 |
+
"mmlu_eval_accuracy_global_facts": 0.3,
|
| 7035 |
+
"mmlu_eval_accuracy_high_school_biology": 0.34375,
|
| 7036 |
+
"mmlu_eval_accuracy_high_school_chemistry": 0.4090909090909091,
|
| 7037 |
+
"mmlu_eval_accuracy_high_school_computer_science": 0.5555555555555556,
|
| 7038 |
+
"mmlu_eval_accuracy_high_school_european_history": 0.6666666666666666,
|
| 7039 |
+
"mmlu_eval_accuracy_high_school_geography": 0.8181818181818182,
|
| 7040 |
+
"mmlu_eval_accuracy_high_school_government_and_politics": 0.6190476190476191,
|
| 7041 |
+
"mmlu_eval_accuracy_high_school_macroeconomics": 0.3488372093023256,
|
| 7042 |
+
"mmlu_eval_accuracy_high_school_mathematics": 0.2413793103448276,
|
| 7043 |
+
"mmlu_eval_accuracy_high_school_microeconomics": 0.34615384615384615,
|
| 7044 |
+
"mmlu_eval_accuracy_high_school_physics": 0.23529411764705882,
|
| 7045 |
+
"mmlu_eval_accuracy_high_school_psychology": 0.8,
|
| 7046 |
+
"mmlu_eval_accuracy_high_school_statistics": 0.43478260869565216,
|
| 7047 |
+
"mmlu_eval_accuracy_high_school_us_history": 0.6363636363636364,
|
| 7048 |
+
"mmlu_eval_accuracy_high_school_world_history": 0.6923076923076923,
|
| 7049 |
+
"mmlu_eval_accuracy_human_aging": 0.6956521739130435,
|
| 7050 |
+
"mmlu_eval_accuracy_human_sexuality": 0.3333333333333333,
|
| 7051 |
+
"mmlu_eval_accuracy_international_law": 0.8461538461538461,
|
| 7052 |
+
"mmlu_eval_accuracy_jurisprudence": 0.36363636363636365,
|
| 7053 |
+
"mmlu_eval_accuracy_logical_fallacies": 0.6111111111111112,
|
| 7054 |
+
"mmlu_eval_accuracy_machine_learning": 0.2727272727272727,
|
| 7055 |
+
"mmlu_eval_accuracy_management": 0.6363636363636364,
|
| 7056 |
+
"mmlu_eval_accuracy_marketing": 0.8,
|
| 7057 |
+
"mmlu_eval_accuracy_medical_genetics": 1.0,
|
| 7058 |
+
"mmlu_eval_accuracy_miscellaneous": 0.6627906976744186,
|
| 7059 |
+
"mmlu_eval_accuracy_moral_disputes": 0.5,
|
| 7060 |
+
"mmlu_eval_accuracy_moral_scenarios": 0.25,
|
| 7061 |
+
"mmlu_eval_accuracy_nutrition": 0.5757575757575758,
|
| 7062 |
+
"mmlu_eval_accuracy_philosophy": 0.47058823529411764,
|
| 7063 |
+
"mmlu_eval_accuracy_prehistory": 0.34285714285714286,
|
| 7064 |
+
"mmlu_eval_accuracy_professional_accounting": 0.3870967741935484,
|
| 7065 |
+
"mmlu_eval_accuracy_professional_law": 0.3352941176470588,
|
| 7066 |
+
"mmlu_eval_accuracy_professional_medicine": 0.45161290322580644,
|
| 7067 |
+
"mmlu_eval_accuracy_professional_psychology": 0.4492753623188406,
|
| 7068 |
+
"mmlu_eval_accuracy_public_relations": 0.5833333333333334,
|
| 7069 |
+
"mmlu_eval_accuracy_security_studies": 0.4074074074074074,
|
| 7070 |
+
"mmlu_eval_accuracy_sociology": 0.7272727272727273,
|
| 7071 |
+
"mmlu_eval_accuracy_us_foreign_policy": 0.5454545454545454,
|
| 7072 |
+
"mmlu_eval_accuracy_virology": 0.5,
|
| 7073 |
+
"mmlu_eval_accuracy_world_religions": 0.631578947368421,
|
| 7074 |
+
"mmlu_loss": 1.1439847480846137,
|
| 7075 |
+
"step": 7400
|
| 7076 |
+
},
|
| 7077 |
+
{
|
| 7078 |
+
"epoch": 1.46,
|
| 7079 |
+
"learning_rate": 0.0002,
|
| 7080 |
+
"loss": 0.3562,
|
| 7081 |
+
"step": 7410
|
| 7082 |
+
},
|
| 7083 |
+
{
|
| 7084 |
+
"epoch": 1.46,
|
| 7085 |
+
"learning_rate": 0.0002,
|
| 7086 |
+
"loss": 0.3582,
|
| 7087 |
+
"step": 7420
|
| 7088 |
+
},
|
| 7089 |
+
{
|
| 7090 |
+
"epoch": 1.47,
|
| 7091 |
+
"learning_rate": 0.0002,
|
| 7092 |
+
"loss": 0.4188,
|
| 7093 |
+
"step": 7430
|
| 7094 |
+
},
|
| 7095 |
+
{
|
| 7096 |
+
"epoch": 1.47,
|
| 7097 |
+
"learning_rate": 0.0002,
|
| 7098 |
+
"loss": 0.3889,
|
| 7099 |
+
"step": 7440
|
| 7100 |
+
},
|
| 7101 |
+
{
|
| 7102 |
+
"epoch": 1.47,
|
| 7103 |
+
"learning_rate": 0.0002,
|
| 7104 |
+
"loss": 0.3712,
|
| 7105 |
+
"step": 7450
|
| 7106 |
+
},
|
| 7107 |
+
{
|
| 7108 |
+
"epoch": 1.47,
|
| 7109 |
+
"learning_rate": 0.0002,
|
| 7110 |
+
"loss": 0.4247,
|
| 7111 |
+
"step": 7460
|
| 7112 |
+
},
|
| 7113 |
+
{
|
| 7114 |
+
"epoch": 1.47,
|
| 7115 |
+
"learning_rate": 0.0002,
|
| 7116 |
+
"loss": 0.3805,
|
| 7117 |
+
"step": 7470
|
| 7118 |
+
},
|
| 7119 |
+
{
|
| 7120 |
+
"epoch": 1.48,
|
| 7121 |
+
"learning_rate": 0.0002,
|
| 7122 |
+
"loss": 0.3322,
|
| 7123 |
+
"step": 7480
|
| 7124 |
+
},
|
| 7125 |
+
{
|
| 7126 |
+
"epoch": 1.48,
|
| 7127 |
+
"learning_rate": 0.0002,
|
| 7128 |
+
"loss": 0.3859,
|
| 7129 |
+
"step": 7490
|
| 7130 |
+
},
|
| 7131 |
+
{
|
| 7132 |
+
"epoch": 1.48,
|
| 7133 |
+
"learning_rate": 0.0002,
|
| 7134 |
+
"loss": 0.3529,
|
| 7135 |
+
"step": 7500
|
| 7136 |
+
},
|
| 7137 |
+
{
|
| 7138 |
+
"epoch": 1.48,
|
| 7139 |
+
"learning_rate": 0.0002,
|
| 7140 |
+
"loss": 0.3412,
|
| 7141 |
+
"step": 7510
|
| 7142 |
+
},
|
| 7143 |
+
{
|
| 7144 |
+
"epoch": 1.48,
|
| 7145 |
+
"learning_rate": 0.0002,
|
| 7146 |
+
"loss": 0.4411,
|
| 7147 |
+
"step": 7520
|
| 7148 |
+
},
|
| 7149 |
+
{
|
| 7150 |
+
"epoch": 1.49,
|
| 7151 |
+
"learning_rate": 0.0002,
|
| 7152 |
+
"loss": 0.3807,
|
| 7153 |
+
"step": 7530
|
| 7154 |
+
},
|
| 7155 |
+
{
|
| 7156 |
+
"epoch": 1.49,
|
| 7157 |
+
"learning_rate": 0.0002,
|
| 7158 |
+
"loss": 0.3794,
|
| 7159 |
+
"step": 7540
|
| 7160 |
+
},
|
| 7161 |
+
{
|
| 7162 |
+
"epoch": 1.49,
|
| 7163 |
+
"learning_rate": 0.0002,
|
| 7164 |
+
"loss": 0.355,
|
| 7165 |
+
"step": 7550
|
| 7166 |
+
},
|
| 7167 |
+
{
|
| 7168 |
+
"epoch": 1.49,
|
| 7169 |
+
"learning_rate": 0.0002,
|
| 7170 |
+
"loss": 0.404,
|
| 7171 |
+
"step": 7560
|
| 7172 |
+
},
|
| 7173 |
+
{
|
| 7174 |
+
"epoch": 1.49,
|
| 7175 |
+
"learning_rate": 0.0002,
|
| 7176 |
+
"loss": 0.4042,
|
| 7177 |
+
"step": 7570
|
| 7178 |
+
},
|
| 7179 |
+
{
|
| 7180 |
+
"epoch": 1.5,
|
| 7181 |
+
"learning_rate": 0.0002,
|
| 7182 |
+
"loss": 0.3696,
|
| 7183 |
+
"step": 7580
|
| 7184 |
+
},
|
| 7185 |
+
{
|
| 7186 |
+
"epoch": 1.5,
|
| 7187 |
+
"learning_rate": 0.0002,
|
| 7188 |
+
"loss": 0.3807,
|
| 7189 |
+
"step": 7590
|
| 7190 |
+
},
|
| 7191 |
+
{
|
| 7192 |
+
"epoch": 1.5,
|
| 7193 |
+
"learning_rate": 0.0002,
|
| 7194 |
+
"loss": 0.4191,
|
| 7195 |
+
"step": 7600
|
| 7196 |
+
},
|
| 7197 |
+
{
|
| 7198 |
+
"epoch": 1.5,
|
| 7199 |
+
"eval_loss": 0.43704837560653687,
|
| 7200 |
+
"eval_runtime": 120.9943,
|
| 7201 |
+
"eval_samples_per_second": 8.265,
|
| 7202 |
+
"eval_steps_per_second": 4.132,
|
| 7203 |
+
"step": 7600
|
| 7204 |
+
},
|
| 7205 |
+
{
|
| 7206 |
+
"epoch": 1.5,
|
| 7207 |
+
"mmlu_eval_accuracy": 0.4864390912539841,
|
| 7208 |
+
"mmlu_eval_accuracy_abstract_algebra": 0.18181818181818182,
|
| 7209 |
+
"mmlu_eval_accuracy_anatomy": 0.6428571428571429,
|
| 7210 |
+
"mmlu_eval_accuracy_astronomy": 0.625,
|
| 7211 |
+
"mmlu_eval_accuracy_business_ethics": 0.6363636363636364,
|
| 7212 |
+
"mmlu_eval_accuracy_clinical_knowledge": 0.5862068965517241,
|
| 7213 |
+
"mmlu_eval_accuracy_college_biology": 0.25,
|
| 7214 |
+
"mmlu_eval_accuracy_college_chemistry": 0.25,
|
| 7215 |
+
"mmlu_eval_accuracy_college_computer_science": 0.2727272727272727,
|
| 7216 |
+
"mmlu_eval_accuracy_college_mathematics": 0.2727272727272727,
|
| 7217 |
+
"mmlu_eval_accuracy_college_medicine": 0.4090909090909091,
|
| 7218 |
+
"mmlu_eval_accuracy_college_physics": 0.45454545454545453,
|
| 7219 |
+
"mmlu_eval_accuracy_computer_security": 0.36363636363636365,
|
| 7220 |
+
"mmlu_eval_accuracy_conceptual_physics": 0.38461538461538464,
|
| 7221 |
+
"mmlu_eval_accuracy_econometrics": 0.16666666666666666,
|
| 7222 |
+
"mmlu_eval_accuracy_electrical_engineering": 0.375,
|
| 7223 |
+
"mmlu_eval_accuracy_elementary_mathematics": 0.36585365853658536,
|
| 7224 |
+
"mmlu_eval_accuracy_formal_logic": 0.21428571428571427,
|
| 7225 |
+
"mmlu_eval_accuracy_global_facts": 0.3,
|
| 7226 |
+
"mmlu_eval_accuracy_high_school_biology": 0.40625,
|
| 7227 |
+
"mmlu_eval_accuracy_high_school_chemistry": 0.36363636363636365,
|
| 7228 |
+
"mmlu_eval_accuracy_high_school_computer_science": 0.5555555555555556,
|
| 7229 |
+
"mmlu_eval_accuracy_high_school_european_history": 0.6111111111111112,
|
| 7230 |
+
"mmlu_eval_accuracy_high_school_geography": 0.8181818181818182,
|
| 7231 |
+
"mmlu_eval_accuracy_high_school_government_and_politics": 0.6190476190476191,
|
| 7232 |
+
"mmlu_eval_accuracy_high_school_macroeconomics": 0.3953488372093023,
|
| 7233 |
+
"mmlu_eval_accuracy_high_school_mathematics": 0.2413793103448276,
|
| 7234 |
+
"mmlu_eval_accuracy_high_school_microeconomics": 0.34615384615384615,
|
| 7235 |
+
"mmlu_eval_accuracy_high_school_physics": 0.23529411764705882,
|
| 7236 |
+
"mmlu_eval_accuracy_high_school_psychology": 0.8333333333333334,
|
| 7237 |
+
"mmlu_eval_accuracy_high_school_statistics": 0.43478260869565216,
|
| 7238 |
+
"mmlu_eval_accuracy_high_school_us_history": 0.6818181818181818,
|
| 7239 |
+
"mmlu_eval_accuracy_high_school_world_history": 0.7307692307692307,
|
| 7240 |
+
"mmlu_eval_accuracy_human_aging": 0.6956521739130435,
|
| 7241 |
+
"mmlu_eval_accuracy_human_sexuality": 0.3333333333333333,
|
| 7242 |
+
"mmlu_eval_accuracy_international_law": 0.8461538461538461,
|
| 7243 |
+
"mmlu_eval_accuracy_jurisprudence": 0.45454545454545453,
|
| 7244 |
+
"mmlu_eval_accuracy_logical_fallacies": 0.6111111111111112,
|
| 7245 |
+
"mmlu_eval_accuracy_machine_learning": 0.2727272727272727,
|
| 7246 |
+
"mmlu_eval_accuracy_management": 0.6363636363636364,
|
| 7247 |
+
"mmlu_eval_accuracy_marketing": 0.8,
|
| 7248 |
+
"mmlu_eval_accuracy_medical_genetics": 0.9090909090909091,
|
| 7249 |
+
"mmlu_eval_accuracy_miscellaneous": 0.6395348837209303,
|
| 7250 |
+
"mmlu_eval_accuracy_moral_disputes": 0.47368421052631576,
|
| 7251 |
+
"mmlu_eval_accuracy_moral_scenarios": 0.27,
|
| 7252 |
+
"mmlu_eval_accuracy_nutrition": 0.6363636363636364,
|
| 7253 |
+
"mmlu_eval_accuracy_philosophy": 0.47058823529411764,
|
| 7254 |
+
"mmlu_eval_accuracy_prehistory": 0.37142857142857144,
|
| 7255 |
+
"mmlu_eval_accuracy_professional_accounting": 0.41935483870967744,
|
| 7256 |
+
"mmlu_eval_accuracy_professional_law": 0.34705882352941175,
|
| 7257 |
+
"mmlu_eval_accuracy_professional_medicine": 0.4838709677419355,
|
| 7258 |
+
"mmlu_eval_accuracy_professional_psychology": 0.42028985507246375,
|
| 7259 |
+
"mmlu_eval_accuracy_public_relations": 0.6666666666666666,
|
| 7260 |
+
"mmlu_eval_accuracy_security_studies": 0.4074074074074074,
|
| 7261 |
+
"mmlu_eval_accuracy_sociology": 0.6818181818181818,
|
| 7262 |
+
"mmlu_eval_accuracy_us_foreign_policy": 0.7272727272727273,
|
| 7263 |
+
"mmlu_eval_accuracy_virology": 0.4444444444444444,
|
| 7264 |
+
"mmlu_eval_accuracy_world_religions": 0.6842105263157895,
|
| 7265 |
+
"mmlu_loss": 1.202333774018848,
|
| 7266 |
+
"step": 7600
|
| 7267 |
+
},
|
| 7268 |
+
{
|
| 7269 |
+
"epoch": 1.5,
|
| 7270 |
+
"learning_rate": 0.0002,
|
| 7271 |
+
"loss": 0.4169,
|
| 7272 |
+
"step": 7610
|
| 7273 |
+
},
|
| 7274 |
+
{
|
| 7275 |
+
"epoch": 1.5,
|
| 7276 |
+
"learning_rate": 0.0002,
|
| 7277 |
+
"loss": 0.4004,
|
| 7278 |
+
"step": 7620
|
| 7279 |
+
},
|
| 7280 |
+
{
|
| 7281 |
+
"epoch": 1.51,
|
| 7282 |
+
"learning_rate": 0.0002,
|
| 7283 |
+
"loss": 0.3925,
|
| 7284 |
+
"step": 7630
|
| 7285 |
+
},
|
| 7286 |
+
{
|
| 7287 |
+
"epoch": 1.51,
|
| 7288 |
+
"learning_rate": 0.0002,
|
| 7289 |
+
"loss": 0.3838,
|
| 7290 |
+
"step": 7640
|
| 7291 |
+
},
|
| 7292 |
+
{
|
| 7293 |
+
"epoch": 1.51,
|
| 7294 |
+
"learning_rate": 0.0002,
|
| 7295 |
+
"loss": 0.339,
|
| 7296 |
+
"step": 7650
|
| 7297 |
+
},
|
| 7298 |
+
{
|
| 7299 |
+
"epoch": 1.51,
|
| 7300 |
+
"learning_rate": 0.0002,
|
| 7301 |
+
"loss": 0.3929,
|
| 7302 |
+
"step": 7660
|
| 7303 |
+
},
|
| 7304 |
+
{
|
| 7305 |
+
"epoch": 1.51,
|
| 7306 |
+
"learning_rate": 0.0002,
|
| 7307 |
+
"loss": 0.4526,
|
| 7308 |
+
"step": 7670
|
| 7309 |
+
},
|
| 7310 |
+
{
|
| 7311 |
+
"epoch": 1.52,
|
| 7312 |
+
"learning_rate": 0.0002,
|
| 7313 |
+
"loss": 0.3797,
|
| 7314 |
+
"step": 7680
|
| 7315 |
+
},
|
| 7316 |
+
{
|
| 7317 |
+
"epoch": 1.52,
|
| 7318 |
+
"learning_rate": 0.0002,
|
| 7319 |
+
"loss": 0.4072,
|
| 7320 |
+
"step": 7690
|
| 7321 |
+
},
|
| 7322 |
+
{
|
| 7323 |
+
"epoch": 1.52,
|
| 7324 |
+
"learning_rate": 0.0002,
|
| 7325 |
+
"loss": 0.3355,
|
| 7326 |
+
"step": 7700
|
| 7327 |
+
},
|
| 7328 |
+
{
|
| 7329 |
+
"epoch": 1.52,
|
| 7330 |
+
"learning_rate": 0.0002,
|
| 7331 |
+
"loss": 0.3736,
|
| 7332 |
+
"step": 7710
|
| 7333 |
+
},
|
| 7334 |
+
{
|
| 7335 |
+
"epoch": 1.52,
|
| 7336 |
+
"learning_rate": 0.0002,
|
| 7337 |
+
"loss": 0.3589,
|
| 7338 |
+
"step": 7720
|
| 7339 |
+
},
|
| 7340 |
+
{
|
| 7341 |
+
"epoch": 1.53,
|
| 7342 |
+
"learning_rate": 0.0002,
|
| 7343 |
+
"loss": 0.3284,
|
| 7344 |
+
"step": 7730
|
| 7345 |
+
},
|
| 7346 |
+
{
|
| 7347 |
+
"epoch": 1.53,
|
| 7348 |
+
"learning_rate": 0.0002,
|
| 7349 |
+
"loss": 0.3473,
|
| 7350 |
+
"step": 7740
|
| 7351 |
+
},
|
| 7352 |
+
{
|
| 7353 |
+
"epoch": 1.53,
|
| 7354 |
+
"learning_rate": 0.0002,
|
| 7355 |
+
"loss": 0.3735,
|
| 7356 |
+
"step": 7750
|
| 7357 |
+
},
|
| 7358 |
+
{
|
| 7359 |
+
"epoch": 1.53,
|
| 7360 |
+
"learning_rate": 0.0002,
|
| 7361 |
+
"loss": 0.3869,
|
| 7362 |
+
"step": 7760
|
| 7363 |
+
},
|
| 7364 |
+
{
|
| 7365 |
+
"epoch": 1.53,
|
| 7366 |
+
"learning_rate": 0.0002,
|
| 7367 |
+
"loss": 0.367,
|
| 7368 |
+
"step": 7770
|
| 7369 |
+
},
|
| 7370 |
+
{
|
| 7371 |
+
"epoch": 1.54,
|
| 7372 |
+
"learning_rate": 0.0002,
|
| 7373 |
+
"loss": 0.4084,
|
| 7374 |
+
"step": 7780
|
| 7375 |
+
},
|
| 7376 |
+
{
|
| 7377 |
+
"epoch": 1.54,
|
| 7378 |
+
"learning_rate": 0.0002,
|
| 7379 |
+
"loss": 0.3827,
|
| 7380 |
+
"step": 7790
|
| 7381 |
+
},
|
| 7382 |
+
{
|
| 7383 |
+
"epoch": 1.54,
|
| 7384 |
+
"learning_rate": 0.0002,
|
| 7385 |
+
"loss": 0.4602,
|
| 7386 |
+
"step": 7800
|
| 7387 |
+
},
|
| 7388 |
+
{
|
| 7389 |
+
"epoch": 1.54,
|
| 7390 |
+
"eval_loss": 0.4351891577243805,
|
| 7391 |
+
"eval_runtime": 121.0193,
|
| 7392 |
+
"eval_samples_per_second": 8.263,
|
| 7393 |
+
"eval_steps_per_second": 4.132,
|
| 7394 |
+
"step": 7800
|
| 7395 |
+
},
|
| 7396 |
+
{
|
| 7397 |
+
"epoch": 1.54,
|
| 7398 |
+
"mmlu_eval_accuracy": 0.48877828979099663,
|
| 7399 |
+
"mmlu_eval_accuracy_abstract_algebra": 0.18181818181818182,
|
| 7400 |
+
"mmlu_eval_accuracy_anatomy": 0.6428571428571429,
|
| 7401 |
+
"mmlu_eval_accuracy_astronomy": 0.4375,
|
| 7402 |
+
"mmlu_eval_accuracy_business_ethics": 0.5454545454545454,
|
| 7403 |
+
"mmlu_eval_accuracy_clinical_knowledge": 0.5172413793103449,
|
| 7404 |
+
"mmlu_eval_accuracy_college_biology": 0.3125,
|
| 7405 |
+
"mmlu_eval_accuracy_college_chemistry": 0.25,
|
| 7406 |
+
"mmlu_eval_accuracy_college_computer_science": 0.45454545454545453,
|
| 7407 |
+
"mmlu_eval_accuracy_college_mathematics": 0.2727272727272727,
|
| 7408 |
+
"mmlu_eval_accuracy_college_medicine": 0.5,
|
| 7409 |
+
"mmlu_eval_accuracy_college_physics": 0.5454545454545454,
|
| 7410 |
+
"mmlu_eval_accuracy_computer_security": 0.2727272727272727,
|
| 7411 |
+
"mmlu_eval_accuracy_conceptual_physics": 0.38461538461538464,
|
| 7412 |
+
"mmlu_eval_accuracy_econometrics": 0.16666666666666666,
|
| 7413 |
+
"mmlu_eval_accuracy_electrical_engineering": 0.3125,
|
| 7414 |
+
"mmlu_eval_accuracy_elementary_mathematics": 0.3902439024390244,
|
| 7415 |
+
"mmlu_eval_accuracy_formal_logic": 0.21428571428571427,
|
| 7416 |
+
"mmlu_eval_accuracy_global_facts": 0.3,
|
| 7417 |
+
"mmlu_eval_accuracy_high_school_biology": 0.375,
|
| 7418 |
+
"mmlu_eval_accuracy_high_school_chemistry": 0.3181818181818182,
|
| 7419 |
+
"mmlu_eval_accuracy_high_school_computer_science": 0.5555555555555556,
|
| 7420 |
+
"mmlu_eval_accuracy_high_school_european_history": 0.6666666666666666,
|
| 7421 |
+
"mmlu_eval_accuracy_high_school_geography": 0.8181818181818182,
|
| 7422 |
+
"mmlu_eval_accuracy_high_school_government_and_politics": 0.6190476190476191,
|
| 7423 |
+
"mmlu_eval_accuracy_high_school_macroeconomics": 0.27906976744186046,
|
| 7424 |
+
"mmlu_eval_accuracy_high_school_mathematics": 0.13793103448275862,
|
| 7425 |
+
"mmlu_eval_accuracy_high_school_microeconomics": 0.4230769230769231,
|
| 7426 |
+
"mmlu_eval_accuracy_high_school_physics": 0.4117647058823529,
|
| 7427 |
+
"mmlu_eval_accuracy_high_school_psychology": 0.8166666666666667,
|
| 7428 |
+
"mmlu_eval_accuracy_high_school_statistics": 0.43478260869565216,
|
| 7429 |
+
"mmlu_eval_accuracy_high_school_us_history": 0.6363636363636364,
|
| 7430 |
+
"mmlu_eval_accuracy_high_school_world_history": 0.7692307692307693,
|
| 7431 |
+
"mmlu_eval_accuracy_human_aging": 0.6956521739130435,
|
| 7432 |
+
"mmlu_eval_accuracy_human_sexuality": 0.3333333333333333,
|
| 7433 |
+
"mmlu_eval_accuracy_international_law": 0.8461538461538461,
|
| 7434 |
+
"mmlu_eval_accuracy_jurisprudence": 0.45454545454545453,
|
| 7435 |
+
"mmlu_eval_accuracy_logical_fallacies": 0.6111111111111112,
|
| 7436 |
+
"mmlu_eval_accuracy_machine_learning": 0.36363636363636365,
|
| 7437 |
+
"mmlu_eval_accuracy_management": 0.6363636363636364,
|
| 7438 |
+
"mmlu_eval_accuracy_marketing": 0.8,
|
| 7439 |
+
"mmlu_eval_accuracy_medical_genetics": 0.9090909090909091,
|
| 7440 |
+
"mmlu_eval_accuracy_miscellaneous": 0.6511627906976745,
|
| 7441 |
+
"mmlu_eval_accuracy_moral_disputes": 0.5263157894736842,
|
| 7442 |
+
"mmlu_eval_accuracy_moral_scenarios": 0.25,
|
| 7443 |
+
"mmlu_eval_accuracy_nutrition": 0.5757575757575758,
|
| 7444 |
+
"mmlu_eval_accuracy_philosophy": 0.47058823529411764,
|
| 7445 |
+
"mmlu_eval_accuracy_prehistory": 0.45714285714285713,
|
| 7446 |
+
"mmlu_eval_accuracy_professional_accounting": 0.3870967741935484,
|
| 7447 |
+
"mmlu_eval_accuracy_professional_law": 0.3235294117647059,
|
| 7448 |
+
"mmlu_eval_accuracy_professional_medicine": 0.5483870967741935,
|
| 7449 |
+
"mmlu_eval_accuracy_professional_psychology": 0.391304347826087,
|
| 7450 |
+
"mmlu_eval_accuracy_public_relations": 0.5833333333333334,
|
| 7451 |
+
"mmlu_eval_accuracy_security_studies": 0.4444444444444444,
|
| 7452 |
+
"mmlu_eval_accuracy_sociology": 0.7272727272727273,
|
| 7453 |
+
"mmlu_eval_accuracy_us_foreign_policy": 0.7272727272727273,
|
| 7454 |
+
"mmlu_eval_accuracy_virology": 0.5,
|
| 7455 |
+
"mmlu_eval_accuracy_world_religions": 0.6842105263157895,
|
| 7456 |
+
"mmlu_loss": 1.0244236340838686,
|
| 7457 |
+
"step": 7800
|
| 7458 |
+
},
|
| 7459 |
+
{
|
| 7460 |
+
"epoch": 1.54,
|
| 7461 |
+
"learning_rate": 0.0002,
|
| 7462 |
+
"loss": 0.3771,
|
| 7463 |
+
"step": 7810
|
| 7464 |
+
},
|
| 7465 |
+
{
|
| 7466 |
+
"epoch": 1.54,
|
| 7467 |
+
"learning_rate": 0.0002,
|
| 7468 |
+
"loss": 0.4082,
|
| 7469 |
+
"step": 7820
|
| 7470 |
+
},
|
| 7471 |
+
{
|
| 7472 |
+
"epoch": 1.55,
|
| 7473 |
+
"learning_rate": 0.0002,
|
| 7474 |
+
"loss": 0.4122,
|
| 7475 |
+
"step": 7830
|
| 7476 |
+
},
|
| 7477 |
+
{
|
| 7478 |
+
"epoch": 1.55,
|
| 7479 |
+
"learning_rate": 0.0002,
|
| 7480 |
+
"loss": 0.4006,
|
| 7481 |
+
"step": 7840
|
| 7482 |
+
},
|
| 7483 |
+
{
|
| 7484 |
+
"epoch": 1.55,
|
| 7485 |
+
"learning_rate": 0.0002,
|
| 7486 |
+
"loss": 0.4035,
|
| 7487 |
+
"step": 7850
|
| 7488 |
+
},
|
| 7489 |
+
{
|
| 7490 |
+
"epoch": 1.55,
|
| 7491 |
+
"learning_rate": 0.0002,
|
| 7492 |
+
"loss": 0.3887,
|
| 7493 |
+
"step": 7860
|
| 7494 |
+
},
|
| 7495 |
+
{
|
| 7496 |
+
"epoch": 1.55,
|
| 7497 |
+
"learning_rate": 0.0002,
|
| 7498 |
+
"loss": 0.3624,
|
| 7499 |
+
"step": 7870
|
| 7500 |
+
},
|
| 7501 |
+
{
|
| 7502 |
+
"epoch": 1.56,
|
| 7503 |
+
"learning_rate": 0.0002,
|
| 7504 |
+
"loss": 0.3508,
|
| 7505 |
+
"step": 7880
|
| 7506 |
+
},
|
| 7507 |
+
{
|
| 7508 |
+
"epoch": 1.56,
|
| 7509 |
+
"learning_rate": 0.0002,
|
| 7510 |
+
"loss": 0.3463,
|
| 7511 |
+
"step": 7890
|
| 7512 |
+
},
|
| 7513 |
+
{
|
| 7514 |
+
"epoch": 1.56,
|
| 7515 |
+
"learning_rate": 0.0002,
|
| 7516 |
+
"loss": 0.3644,
|
| 7517 |
+
"step": 7900
|
| 7518 |
+
},
|
| 7519 |
+
{
|
| 7520 |
+
"epoch": 1.56,
|
| 7521 |
+
"learning_rate": 0.0002,
|
| 7522 |
+
"loss": 0.428,
|
| 7523 |
+
"step": 7910
|
| 7524 |
+
},
|
| 7525 |
+
{
|
| 7526 |
+
"epoch": 1.56,
|
| 7527 |
+
"learning_rate": 0.0002,
|
| 7528 |
+
"loss": 0.3583,
|
| 7529 |
+
"step": 7920
|
| 7530 |
+
},
|
| 7531 |
+
{
|
| 7532 |
+
"epoch": 1.57,
|
| 7533 |
+
"learning_rate": 0.0002,
|
| 7534 |
+
"loss": 0.3895,
|
| 7535 |
+
"step": 7930
|
| 7536 |
+
},
|
| 7537 |
+
{
|
| 7538 |
+
"epoch": 1.57,
|
| 7539 |
+
"learning_rate": 0.0002,
|
| 7540 |
+
"loss": 0.379,
|
| 7541 |
+
"step": 7940
|
| 7542 |
+
},
|
| 7543 |
+
{
|
| 7544 |
+
"epoch": 1.57,
|
| 7545 |
+
"learning_rate": 0.0002,
|
| 7546 |
+
"loss": 0.3231,
|
| 7547 |
+
"step": 7950
|
| 7548 |
+
},
|
| 7549 |
+
{
|
| 7550 |
+
"epoch": 1.57,
|
| 7551 |
+
"learning_rate": 0.0002,
|
| 7552 |
+
"loss": 0.3399,
|
| 7553 |
+
"step": 7960
|
| 7554 |
+
},
|
| 7555 |
+
{
|
| 7556 |
+
"epoch": 1.57,
|
| 7557 |
+
"learning_rate": 0.0002,
|
| 7558 |
+
"loss": 0.4171,
|
| 7559 |
+
"step": 7970
|
| 7560 |
+
},
|
| 7561 |
+
{
|
| 7562 |
+
"epoch": 1.58,
|
| 7563 |
+
"learning_rate": 0.0002,
|
| 7564 |
+
"loss": 0.4399,
|
| 7565 |
+
"step": 7980
|
| 7566 |
+
},
|
| 7567 |
+
{
|
| 7568 |
+
"epoch": 1.58,
|
| 7569 |
+
"learning_rate": 0.0002,
|
| 7570 |
+
"loss": 0.3888,
|
| 7571 |
+
"step": 7990
|
| 7572 |
+
},
|
| 7573 |
+
{
|
| 7574 |
+
"epoch": 1.58,
|
| 7575 |
+
"learning_rate": 0.0002,
|
| 7576 |
+
"loss": 0.3381,
|
| 7577 |
+
"step": 8000
|
| 7578 |
+
},
|
| 7579 |
+
{
|
| 7580 |
+
"epoch": 1.58,
|
| 7581 |
+
"eval_loss": 0.43523940443992615,
|
| 7582 |
+
"eval_runtime": 120.9711,
|
| 7583 |
+
"eval_samples_per_second": 8.266,
|
| 7584 |
+
"eval_steps_per_second": 4.133,
|
| 7585 |
+
"step": 8000
|
| 7586 |
+
},
|
| 7587 |
+
{
|
| 7588 |
+
"epoch": 1.58,
|
| 7589 |
+
"mmlu_eval_accuracy": 0.5000652378894127,
|
| 7590 |
+
"mmlu_eval_accuracy_abstract_algebra": 0.2727272727272727,
|
| 7591 |
+
"mmlu_eval_accuracy_anatomy": 0.6428571428571429,
|
| 7592 |
+
"mmlu_eval_accuracy_astronomy": 0.5,
|
| 7593 |
+
"mmlu_eval_accuracy_business_ethics": 0.5454545454545454,
|
| 7594 |
+
"mmlu_eval_accuracy_clinical_knowledge": 0.5172413793103449,
|
| 7595 |
+
"mmlu_eval_accuracy_college_biology": 0.3125,
|
| 7596 |
+
"mmlu_eval_accuracy_college_chemistry": 0.25,
|
| 7597 |
+
"mmlu_eval_accuracy_college_computer_science": 0.45454545454545453,
|
| 7598 |
+
"mmlu_eval_accuracy_college_mathematics": 0.2727272727272727,
|
| 7599 |
+
"mmlu_eval_accuracy_college_medicine": 0.45454545454545453,
|
| 7600 |
+
"mmlu_eval_accuracy_college_physics": 0.45454545454545453,
|
| 7601 |
+
"mmlu_eval_accuracy_computer_security": 0.36363636363636365,
|
| 7602 |
+
"mmlu_eval_accuracy_conceptual_physics": 0.38461538461538464,
|
| 7603 |
+
"mmlu_eval_accuracy_econometrics": 0.08333333333333333,
|
| 7604 |
+
"mmlu_eval_accuracy_electrical_engineering": 0.3125,
|
| 7605 |
+
"mmlu_eval_accuracy_elementary_mathematics": 0.4146341463414634,
|
| 7606 |
+
"mmlu_eval_accuracy_formal_logic": 0.2857142857142857,
|
| 7607 |
+
"mmlu_eval_accuracy_global_facts": 0.3,
|
| 7608 |
+
"mmlu_eval_accuracy_high_school_biology": 0.40625,
|
| 7609 |
+
"mmlu_eval_accuracy_high_school_chemistry": 0.4090909090909091,
|
| 7610 |
+
"mmlu_eval_accuracy_high_school_computer_science": 0.5555555555555556,
|
| 7611 |
+
"mmlu_eval_accuracy_high_school_european_history": 0.6666666666666666,
|
| 7612 |
+
"mmlu_eval_accuracy_high_school_geography": 0.8636363636363636,
|
| 7613 |
+
"mmlu_eval_accuracy_high_school_government_and_politics": 0.6190476190476191,
|
| 7614 |
+
"mmlu_eval_accuracy_high_school_macroeconomics": 0.32558139534883723,
|
| 7615 |
+
"mmlu_eval_accuracy_high_school_mathematics": 0.3103448275862069,
|
| 7616 |
+
"mmlu_eval_accuracy_high_school_microeconomics": 0.34615384615384615,
|
| 7617 |
+
"mmlu_eval_accuracy_high_school_physics": 0.35294117647058826,
|
| 7618 |
+
"mmlu_eval_accuracy_high_school_psychology": 0.8333333333333334,
|
| 7619 |
+
"mmlu_eval_accuracy_high_school_statistics": 0.391304347826087,
|
| 7620 |
+
"mmlu_eval_accuracy_high_school_us_history": 0.7272727272727273,
|
| 7621 |
+
"mmlu_eval_accuracy_high_school_world_history": 0.7692307692307693,
|
| 7622 |
+
"mmlu_eval_accuracy_human_aging": 0.6956521739130435,
|
| 7623 |
+
"mmlu_eval_accuracy_human_sexuality": 0.3333333333333333,
|
| 7624 |
+
"mmlu_eval_accuracy_international_law": 0.7692307692307693,
|
| 7625 |
+
"mmlu_eval_accuracy_jurisprudence": 0.45454545454545453,
|
| 7626 |
+
"mmlu_eval_accuracy_logical_fallacies": 0.6111111111111112,
|
| 7627 |
+
"mmlu_eval_accuracy_machine_learning": 0.2727272727272727,
|
| 7628 |
+
"mmlu_eval_accuracy_management": 0.7272727272727273,
|
| 7629 |
+
"mmlu_eval_accuracy_marketing": 0.8,
|
| 7630 |
+
"mmlu_eval_accuracy_medical_genetics": 1.0,
|
| 7631 |
+
"mmlu_eval_accuracy_miscellaneous": 0.6511627906976745,
|
| 7632 |
+
"mmlu_eval_accuracy_moral_disputes": 0.5,
|
| 7633 |
+
"mmlu_eval_accuracy_moral_scenarios": 0.24,
|
| 7634 |
+
"mmlu_eval_accuracy_nutrition": 0.6363636363636364,
|
| 7635 |
+
"mmlu_eval_accuracy_philosophy": 0.4117647058823529,
|
| 7636 |
+
"mmlu_eval_accuracy_prehistory": 0.45714285714285713,
|
| 7637 |
+
"mmlu_eval_accuracy_professional_accounting": 0.41935483870967744,
|
| 7638 |
+
"mmlu_eval_accuracy_professional_law": 0.3176470588235294,
|
| 7639 |
+
"mmlu_eval_accuracy_professional_medicine": 0.45161290322580644,
|
| 7640 |
+
"mmlu_eval_accuracy_professional_psychology": 0.43478260869565216,
|
| 7641 |
+
"mmlu_eval_accuracy_public_relations": 0.6666666666666666,
|
| 7642 |
+
"mmlu_eval_accuracy_security_studies": 0.5185185185185185,
|
| 7643 |
+
"mmlu_eval_accuracy_sociology": 0.6818181818181818,
|
| 7644 |
+
"mmlu_eval_accuracy_us_foreign_policy": 0.8181818181818182,
|
| 7645 |
+
"mmlu_eval_accuracy_virology": 0.5,
|
| 7646 |
+
"mmlu_eval_accuracy_world_religions": 0.7368421052631579,
|
| 7647 |
+
"mmlu_loss": 1.1098531644120229,
|
| 7648 |
+
"step": 8000
|
| 7649 |
}
|
| 7650 |
],
|
| 7651 |
"max_steps": 10000,
|
| 7652 |
"num_train_epochs": 2,
|
| 7653 |
+
"total_flos": 7.39508436215464e+17,
|
| 7654 |
"trial_name": null,
|
| 7655 |
"trial_params": null
|
| 7656 |
}
|
{checkpoint-5800 → checkpoint-8000}/training_args.bin
RENAMED
File without changes
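For reference, a minimal sketch (not part of the commit) of how the `trainer_state.json` logged above can be inspected offline. It assumes the standard Hugging Face Trainer layout, where the records shown sit in a top-level `log_history` list; the checkpoint path is illustrative.

```python
import json

# Load the trainer state saved with this checkpoint (path is an assumption).
with open("checkpoint-8000/trainer_state.json") as f:
    state = json.load(f)

logs = state["log_history"]  # standard Trainer key; assumed, not shown above
train = [r for r in logs if "loss" in r]                 # per-10-step training loss
evals = [r for r in logs if "eval_loss" in r]            # periodic eval loss
mmlu = [r for r in logs if "mmlu_eval_accuracy" in r]    # periodic MMLU eval

print(f"trained to step {train[-1]['step']} of {state['max_steps']}")
for r in evals:
    print(f"step {r['step']}: eval_loss={r['eval_loss']:.4f}")
for r in mmlu:
    print(f"step {r['step']}: mmlu_eval_accuracy={r['mmlu_eval_accuracy']:.4f}")
```

Run against this checkpoint, the last three lines of output would reflect the values above: eval_loss 0.4352 and mmlu_eval_accuracy 0.5001 at step 8000.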