Training in progress, step 1423, checkpoint
last-checkpoint/adapter_model.safetensors
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:c4b64f69ba473d1fa5478151127c8bee278c022620b0ec068bdaf8740594ecf3
 size 25271744
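This and the three pointer diffs that follow touch Git LFS pointer files, not the binaries themselves: each pointer records a `version` line, the content's SHA-256 (`oid`), and its byte `size`. A minimal sketch, assuming the blob has been pulled to a local `last-checkpoint/` directory, of checking a download against the new oid from this diff:

```python
import hashlib

def lfs_oid(path: str, chunk_size: int = 1 << 20) -> str:
    """Compute the sha256 digest Git LFS records as `oid sha256:<hex>`."""
    h = hashlib.sha256()
    with open(path, "rb") as f:
        while chunk := f.read(chunk_size):
            h.update(chunk)
    return h.hexdigest()

# Hypothetical local copy of the file updated in this commit.
digest = lfs_oid("last-checkpoint/adapter_model.safetensors")
assert digest == "c4b64f69ba473d1fa5478151127c8bee278c022620b0ec068bdaf8740594ecf3"
```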
last-checkpoint/optimizer.pt
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:23e2133a12bb5cfe6511e7d0dfee94fd28854e1801ecec497e2046454fdecf8c
 size 13685836
last-checkpoint/rng_state.pth
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:825f03161738be6b8066e69966e969a50555ff1b945980f74e6b68715ceac98e
 size 14244
last-checkpoint/scheduler.pt
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:a9d85461961714f3b571d1c5a9864a16b570870cca12155a77c936abb3216a83
 size 1064
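Together these four blobs are what a Trainer-style resume needs: the adapter weights, the optimizer moments, the RNG snapshot, and the LR-scheduler state. A minimal inspection sketch, assuming the `last-checkpoint/` directory has been pulled locally; the exact layout of the RNG dict varies by transformers version, so the printed keys are illustrative, not guaranteed:

```python
import torch
from safetensors.torch import load_file

ckpt = "last-checkpoint"  # hypothetical local clone of this commit

# Adapter weights: the 25 MB safetensors blob maps tensor names to tensors.
adapter = load_file(f"{ckpt}/adapter_model.safetensors")
print(f"{len(adapter)} tensors, {sum(t.numel() for t in adapter.values()):,} params")

# Optimizer state dict; feed to optimizer.load_state_dict() after rebuilding
# the optimizer exactly as in the original training script.
opt = torch.load(f"{ckpt}/optimizer.pt", map_location="cpu", weights_only=False)
print(sorted(opt.keys()))  # typically ['param_groups', 'state']

# RNG snapshot and LR-scheduler state (the two small files above).
rng = torch.load(f"{ckpt}/rng_state.pth", map_location="cpu", weights_only=False)
sched = torch.load(f"{ckpt}/scheduler.pt", map_location="cpu", weights_only=False)
```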
last-checkpoint/trainer_state.json
CHANGED
@@ -1,9 +1,9 @@
 {
   "best_metric": null,
   "best_model_checkpoint": null,
-  "epoch": 0.
+  "epoch": 0.04857069715846062,
   "eval_steps": 356,
-  "global_step":
+  "global_step": 1423,
   "is_hyper_param_search": false,
   "is_local_process_zero": true,
   "is_world_process_zero": true,
@@ -7515,6 +7515,2491 @@
       "eval_samples_per_second": 48.647,
       "eval_steps_per_second": 24.324,
       "step": 1068
+    },
+    {
+      "epoch": 0.03648775492789487,
+      "grad_norm": 0.8495626449584961,
+      "learning_rate": 2.9407331041607566e-05,
+      "loss": 2.112,
+      "step": 1069
+    },
+    {
+      "epoch": 0.03652188753306596,
+      "grad_norm": 0.7763845324516296,
+      "learning_rate": 2.925002917124886e-05,
+      "loss": 1.3919,
+      "step": 1070
+    },
+    {
+      "epoch": 0.03655602013823705,
+      "grad_norm": 0.745794415473938,
+      "learning_rate": 2.9093077037831827e-05,
+      "loss": 2.1224,
+      "step": 1071
[… the remaining added lines (2,485 in total) continue this seven-line pattern, one log entry per step from 1069 through 1423; this capture breaks off mid-file at the step-1292 entry …]
     }
   ],
   "logging_steps": 1,
@@ -7529,12 +10014,12 @@
         "should_evaluate": false,
         "should_log": false,
         "should_save": true,
-        "should_training_stop":
       },
       "attributes": {}
     }
   },
-  "total_flos":
   "train_batch_size": 2,
   "trial_name": null,
   "trial_params": null
|
9086 |
+
},
|
9087 |
+
{
|
9088 |
+
"epoch": 0.04413345848621896,
|
9089 |
+
"grad_norm": 1.0471081733703613,
|
9090 |
+
"learning_rate": 4.148074581011574e-06,
|
9091 |
+
"loss": 2.2348,
|
9092 |
+
"step": 1293
|
9093 |
+
},
|
9094 |
+
{
|
9095 |
+
"epoch": 0.04416759109139005,
|
9096 |
+
"grad_norm": 0.9029792547225952,
|
9097 |
+
"learning_rate": 4.084939909758012e-06,
|
9098 |
+
"loss": 2.1077,
|
9099 |
+
"step": 1294
|
9100 |
+
},
|
9101 |
+
{
|
9102 |
+
"epoch": 0.04420172369656114,
|
9103 |
+
"grad_norm": 1.1913082599639893,
|
9104 |
+
"learning_rate": 4.02227937353773e-06,
|
9105 |
+
"loss": 1.9741,
|
9106 |
+
"step": 1295
|
9107 |
+
},
|
9108 |
+
{
|
9109 |
+
"epoch": 0.044235856301732226,
|
9110 |
+
"grad_norm": 0.9432790875434875,
|
9111 |
+
"learning_rate": 3.96009328209932e-06,
|
9112 |
+
"loss": 2.3753,
|
9113 |
+
"step": 1296
|
9114 |
+
},
|
9115 |
+
{
|
9116 |
+
"epoch": 0.04426998890690332,
|
9117 |
+
"grad_norm": 1.4276188611984253,
|
9118 |
+
"learning_rate": 3.898381942846041e-06,
|
9119 |
+
"loss": 1.695,
|
9120 |
+
"step": 1297
|
9121 |
+
},
|
9122 |
+
{
|
9123 |
+
"epoch": 0.04430412151207441,
|
9124 |
+
"grad_norm": 1.0604877471923828,
|
9125 |
+
"learning_rate": 3.837145660834385e-06,
|
9126 |
+
"loss": 2.1419,
|
9127 |
+
"step": 1298
|
9128 |
+
},
|
9129 |
+
{
|
9130 |
+
"epoch": 0.0443382541172455,
|
9131 |
+
"grad_norm": 1.2316205501556396,
|
9132 |
+
"learning_rate": 3.7763847387724427e-06,
|
9133 |
+
"loss": 2.5856,
|
9134 |
+
"step": 1299
|
9135 |
+
},
|
9136 |
+
{
|
9137 |
+
"epoch": 0.04437238672241659,
|
9138 |
+
"grad_norm": 1.2546662092208862,
|
9139 |
+
"learning_rate": 3.7160994770184755e-06,
|
9140 |
+
"loss": 2.5074,
|
9141 |
+
"step": 1300
|
9142 |
+
},
|
9143 |
+
{
|
9144 |
+
"epoch": 0.04440651932758768,
|
9145 |
+
"grad_norm": 1.338253378868103,
|
9146 |
+
"learning_rate": 3.656290173579446e-06,
|
9147 |
+
"loss": 2.0539,
|
9148 |
+
"step": 1301
|
9149 |
+
},
|
9150 |
+
{
|
9151 |
+
"epoch": 0.04444065193275877,
|
9152 |
+
"grad_norm": 1.2859060764312744,
|
9153 |
+
"learning_rate": 3.5969571241095124e-06,
|
9154 |
+
"loss": 1.9862,
|
9155 |
+
"step": 1302
|
9156 |
+
},
|
9157 |
+
{
|
9158 |
+
"epoch": 0.04447478453792986,
|
9159 |
+
"grad_norm": 0.5853189826011658,
|
9160 |
+
"learning_rate": 3.5381006219085687e-06,
|
9161 |
+
"loss": 1.0462,
|
9162 |
+
"step": 1303
|
9163 |
+
},
|
9164 |
+
{
|
9165 |
+
"epoch": 0.04450891714310095,
|
9166 |
+
"grad_norm": 1.0077992677688599,
|
9167 |
+
"learning_rate": 3.4797209579208066e-06,
|
9168 |
+
"loss": 2.0912,
|
9169 |
+
"step": 1304
|
9170 |
+
},
|
9171 |
+
{
|
9172 |
+
"epoch": 0.044543049748272036,
|
9173 |
+
"grad_norm": 1.234041690826416,
|
9174 |
+
"learning_rate": 3.4218184207332806e-06,
|
9175 |
+
"loss": 2.0927,
|
9176 |
+
"step": 1305
|
9177 |
+
},
|
9178 |
+
{
|
9179 |
+
"epoch": 0.04457718235344313,
|
9180 |
+
"grad_norm": 1.0874733924865723,
|
9181 |
+
"learning_rate": 3.364393296574453e-06,
|
9182 |
+
"loss": 2.2452,
|
9183 |
+
"step": 1306
|
9184 |
+
},
|
9185 |
+
{
|
9186 |
+
"epoch": 0.044611314958614215,
|
9187 |
+
"grad_norm": 0.9964419603347778,
|
9188 |
+
"learning_rate": 3.3074458693128086e-06,
|
9189 |
+
"loss": 1.7768,
|
9190 |
+
"step": 1307
|
9191 |
+
},
|
9192 |
+
{
|
9193 |
+
"epoch": 0.04464544756378531,
|
9194 |
+
"grad_norm": 0.941646933555603,
|
9195 |
+
"learning_rate": 3.2509764204554648e-06,
|
9196 |
+
"loss": 2.1365,
|
9197 |
+
"step": 1308
|
9198 |
+
},
|
9199 |
+
{
|
9200 |
+
"epoch": 0.044679580168956394,
|
9201 |
+
"grad_norm": 1.1146620512008667,
|
9202 |
+
"learning_rate": 3.194985229146741e-06,
|
9203 |
+
"loss": 2.0593,
|
9204 |
+
"step": 1309
|
9205 |
+
},
|
9206 |
+
{
|
9207 |
+
"epoch": 0.04471371277412749,
|
9208 |
+
"grad_norm": 1.0267490148544312,
|
9209 |
+
"learning_rate": 3.1394725721667793e-06,
|
9210 |
+
"loss": 2.1974,
|
9211 |
+
"step": 1310
|
9212 |
+
},
|
9213 |
+
{
|
9214 |
+
"epoch": 0.04474784537929857,
|
9215 |
+
"grad_norm": 1.1870267391204834,
|
9216 |
+
"learning_rate": 3.0844387239302053e-06,
|
9217 |
+
"loss": 2.4985,
|
9218 |
+
"step": 1311
|
9219 |
+
},
|
9220 |
+
{
|
9221 |
+
"epoch": 0.044781977984469666,
|
9222 |
+
"grad_norm": 1.1883535385131836,
|
9223 |
+
"learning_rate": 3.0298839564847805e-06,
|
9224 |
+
"loss": 1.8583,
|
9225 |
+
"step": 1312
|
9226 |
+
},
|
9227 |
+
{
|
9228 |
+
"epoch": 0.04481611058964075,
|
9229 |
+
"grad_norm": 1.052564263343811,
|
9230 |
+
"learning_rate": 2.9758085395099834e-06,
|
9231 |
+
"loss": 2.1254,
|
9232 |
+
"step": 1313
|
9233 |
+
},
|
9234 |
+
{
|
9235 |
+
"epoch": 0.044850243194811845,
|
9236 |
+
"grad_norm": 0.923747718334198,
|
9237 |
+
"learning_rate": 2.9222127403157775e-06,
|
9238 |
+
"loss": 1.9149,
|
9239 |
+
"step": 1314
|
9240 |
+
},
|
9241 |
+
{
|
9242 |
+
"epoch": 0.04488437579998293,
|
9243 |
+
"grad_norm": 1.1016491651535034,
|
9244 |
+
"learning_rate": 2.869096823841244e-06,
|
9245 |
+
"loss": 1.9526,
|
9246 |
+
"step": 1315
|
9247 |
+
},
|
9248 |
+
{
|
9249 |
+
"epoch": 0.044918508405154024,
|
9250 |
+
"grad_norm": 1.035510540008545,
|
9251 |
+
"learning_rate": 2.81646105265323e-06,
|
9252 |
+
"loss": 1.5543,
|
9253 |
+
"step": 1316
|
9254 |
+
},
|
9255 |
+
{
|
9256 |
+
"epoch": 0.04495264101032511,
|
9257 |
+
"grad_norm": 1.130951166152954,
|
9258 |
+
"learning_rate": 2.764305686945101e-06,
|
9259 |
+
"loss": 2.0834,
|
9260 |
+
"step": 1317
|
9261 |
+
},
|
9262 |
+
{
|
9263 |
+
"epoch": 0.0449867736154962,
|
9264 |
+
"grad_norm": 1.0617868900299072,
|
9265 |
+
"learning_rate": 2.712630984535469e-06,
|
9266 |
+
"loss": 2.1189,
|
9267 |
+
"step": 1318
|
9268 |
+
},
|
9269 |
+
{
|
9270 |
+
"epoch": 0.04502090622066729,
|
9271 |
+
"grad_norm": 1.4129559993743896,
|
9272 |
+
"learning_rate": 2.6614372008668674e-06,
|
9273 |
+
"loss": 2.262,
|
9274 |
+
"step": 1319
|
9275 |
+
},
|
9276 |
+
{
|
9277 |
+
"epoch": 0.04505503882583838,
|
9278 |
+
"grad_norm": 1.0555247068405151,
|
9279 |
+
"learning_rate": 2.6107245890045206e-06,
|
9280 |
+
"loss": 2.1948,
|
9281 |
+
"step": 1320
|
9282 |
+
},
|
9283 |
+
{
|
9284 |
+
"epoch": 0.04508917143100947,
|
9285 |
+
"grad_norm": 0.9239864945411682,
|
9286 |
+
"learning_rate": 2.5604933996351e-06,
|
9287 |
+
"loss": 1.971,
|
9288 |
+
"step": 1321
|
9289 |
+
},
|
9290 |
+
{
|
9291 |
+
"epoch": 0.04512330403618056,
|
9292 |
+
"grad_norm": 1.1590337753295898,
|
9293 |
+
"learning_rate": 2.510743881065447e-06,
|
9294 |
+
"loss": 2.2772,
|
9295 |
+
"step": 1322
|
9296 |
+
},
|
9297 |
+
{
|
9298 |
+
"epoch": 0.045157436641351655,
|
9299 |
+
"grad_norm": 1.143127202987671,
|
9300 |
+
"learning_rate": 2.461476279221375e-06,
|
9301 |
+
"loss": 1.7552,
|
9302 |
+
"step": 1323
|
9303 |
+
},
|
9304 |
+
{
|
9305 |
+
"epoch": 0.04519156924652274,
|
9306 |
+
"grad_norm": 1.202525019645691,
|
9307 |
+
"learning_rate": 2.41269083764647e-06,
|
9308 |
+
"loss": 1.9589,
|
9309 |
+
"step": 1324
|
9310 |
+
},
|
9311 |
+
{
|
9312 |
+
"epoch": 0.045225701851693834,
|
9313 |
+
"grad_norm": 1.19517982006073,
|
9314 |
+
"learning_rate": 2.364387797500822e-06,
|
9315 |
+
"loss": 1.8494,
|
9316 |
+
"step": 1325
|
9317 |
+
},
|
9318 |
+
{
|
9319 |
+
"epoch": 0.04525983445686492,
|
9320 |
+
"grad_norm": 0.7366655468940735,
|
9321 |
+
"learning_rate": 2.316567397559921e-06,
|
9322 |
+
"loss": 1.2583,
|
9323 |
+
"step": 1326
|
9324 |
+
},
|
9325 |
+
{
|
9326 |
+
"epoch": 0.04529396706203601,
|
9327 |
+
"grad_norm": 1.1081175804138184,
|
9328 |
+
"learning_rate": 2.269229874213419e-06,
|
9329 |
+
"loss": 2.2817,
|
9330 |
+
"step": 1327
|
9331 |
+
},
|
9332 |
+
{
|
9333 |
+
"epoch": 0.0453280996672071,
|
9334 |
+
"grad_norm": 1.0630899667739868,
|
9335 |
+
"learning_rate": 2.2223754614639436e-06,
|
9336 |
+
"loss": 2.4328,
|
9337 |
+
"step": 1328
|
9338 |
+
},
|
9339 |
+
{
|
9340 |
+
"epoch": 0.04536223227237819,
|
9341 |
+
"grad_norm": 1.29612135887146,
|
9342 |
+
"learning_rate": 2.176004390926001e-06,
|
9343 |
+
"loss": 2.2226,
|
9344 |
+
"step": 1329
|
9345 |
+
},
|
9346 |
+
{
|
9347 |
+
"epoch": 0.04539636487754928,
|
9348 |
+
"grad_norm": 1.076706886291504,
|
9349 |
+
"learning_rate": 2.130116891824796e-06,
|
9350 |
+
"loss": 1.7144,
|
9351 |
+
"step": 1330
|
9352 |
+
},
|
9353 |
+
{
|
9354 |
+
"epoch": 0.04543049748272037,
|
9355 |
+
"grad_norm": 1.2614665031433105,
|
9356 |
+
"learning_rate": 2.0847131909950913e-06,
|
9357 |
+
"loss": 2.1419,
|
9358 |
+
"step": 1331
|
9359 |
+
},
|
9360 |
+
{
|
9361 |
+
"epoch": 0.04546463008789146,
|
9362 |
+
"grad_norm": 1.1320222616195679,
|
9363 |
+
"learning_rate": 2.0397935128801283e-06,
|
9364 |
+
"loss": 2.2707,
|
9365 |
+
"step": 1332
|
9366 |
+
},
|
9367 |
+
{
|
9368 |
+
"epoch": 0.04549876269306255,
|
9369 |
+
"grad_norm": 0.7616283297538757,
|
9370 |
+
"learning_rate": 1.995358079530463e-06,
|
9371 |
+
"loss": 1.7522,
|
9372 |
+
"step": 1333
|
9373 |
+
},
|
9374 |
+
{
|
9375 |
+
"epoch": 0.045532895298233636,
|
9376 |
+
"grad_norm": 0.7978371977806091,
|
9377 |
+
"learning_rate": 1.951407110602899e-06,
|
9378 |
+
"loss": 1.646,
|
9379 |
+
"step": 1334
|
9380 |
+
},
|
9381 |
+
{
|
9382 |
+
"epoch": 0.04556702790340473,
|
9383 |
+
"grad_norm": 0.8964866399765015,
|
9384 |
+
"learning_rate": 1.9079408233594e-06,
|
9385 |
+
"loss": 1.8328,
|
9386 |
+
"step": 1335
|
9387 |
+
},
|
9388 |
+
{
|
9389 |
+
"epoch": 0.045601160508575815,
|
9390 |
+
"grad_norm": 1.170520544052124,
|
9391 |
+
"learning_rate": 1.8649594326660025e-06,
|
9392 |
+
"loss": 2.0892,
|
9393 |
+
"step": 1336
|
9394 |
+
},
|
9395 |
+
{
|
9396 |
+
"epoch": 0.04563529311374691,
|
9397 |
+
"grad_norm": 1.1115111112594604,
|
9398 |
+
"learning_rate": 1.822463150991771e-06,
|
9399 |
+
"loss": 2.1184,
|
9400 |
+
"step": 1337
|
9401 |
+
},
|
9402 |
+
{
|
9403 |
+
"epoch": 0.045669425718917994,
|
9404 |
+
"grad_norm": 0.9369651079177856,
|
9405 |
+
"learning_rate": 1.780452188407744e-06,
|
9406 |
+
"loss": 1.5654,
|
9407 |
+
"step": 1338
|
9408 |
+
},
|
9409 |
+
{
|
9410 |
+
"epoch": 0.04570355832408909,
|
9411 |
+
"grad_norm": 1.1608185768127441,
|
9412 |
+
"learning_rate": 1.738926752585901e-06,
|
9413 |
+
"loss": 2.1,
|
9414 |
+
"step": 1339
|
9415 |
+
},
|
9416 |
+
{
|
9417 |
+
"epoch": 0.04573769092926017,
|
9418 |
+
"grad_norm": 0.6818026900291443,
|
9419 |
+
"learning_rate": 1.697887048798108e-06,
|
9420 |
+
"loss": 0.7222,
|
9421 |
+
"step": 1340
|
9422 |
+
},
|
9423 |
+
{
|
9424 |
+
"epoch": 0.045771823534431266,
|
9425 |
+
"grad_norm": 1.2034977674484253,
|
9426 |
+
"learning_rate": 1.6573332799151076e-06,
|
9427 |
+
"loss": 1.6944,
|
9428 |
+
"step": 1341
|
9429 |
+
},
|
9430 |
+
{
|
9431 |
+
"epoch": 0.04580595613960235,
|
9432 |
+
"grad_norm": 1.2232041358947754,
|
9433 |
+
"learning_rate": 1.6172656464055747e-06,
|
9434 |
+
"loss": 2.3065,
|
9435 |
+
"step": 1342
|
9436 |
+
},
|
9437 |
+
{
|
9438 |
+
"epoch": 0.045840088744773445,
|
9439 |
+
"grad_norm": 1.057916522026062,
|
9440 |
+
"learning_rate": 1.5776843463350288e-06,
|
9441 |
+
"loss": 2.2412,
|
9442 |
+
"step": 1343
|
9443 |
+
},
|
9444 |
+
{
|
9445 |
+
"epoch": 0.04587422134994453,
|
9446 |
+
"grad_norm": 1.2213491201400757,
|
9447 |
+
"learning_rate": 1.538589575364946e-06,
|
9448 |
+
"loss": 2.0586,
|
9449 |
+
"step": 1344
|
9450 |
+
},
|
9451 |
+
{
|
9452 |
+
"epoch": 0.045908353955115624,
|
9453 |
+
"grad_norm": 1.393068790435791,
|
9454 |
+
"learning_rate": 1.4999815267517593e-06,
|
9455 |
+
"loss": 2.0436,
|
9456 |
+
"step": 1345
|
9457 |
+
},
|
9458 |
+
{
|
9459 |
+
"epoch": 0.04594248656028671,
|
9460 |
+
"grad_norm": 1.5732533931732178,
|
9461 |
+
"learning_rate": 1.4618603913458596e-06,
|
9462 |
+
"loss": 1.5546,
|
9463 |
+
"step": 1346
|
9464 |
+
},
|
9465 |
+
{
|
9466 |
+
"epoch": 0.045976619165457804,
|
9467 |
+
"grad_norm": 1.3528705835342407,
|
9468 |
+
"learning_rate": 1.4242263575906967e-06,
|
9469 |
+
"loss": 2.1396,
|
9470 |
+
"step": 1347
|
9471 |
+
},
|
9472 |
+
{
|
9473 |
+
"epoch": 0.0460107517706289,
|
9474 |
+
"grad_norm": 1.1608805656433105,
|
9475 |
+
"learning_rate": 1.3870796115218687e-06,
|
9476 |
+
"loss": 2.3557,
|
9477 |
+
"step": 1348
|
9478 |
+
},
|
9479 |
+
{
|
9480 |
+
"epoch": 0.04604488437579998,
|
9481 |
+
"grad_norm": 1.103907823562622,
|
9482 |
+
"learning_rate": 1.350420336766134e-06,
|
9483 |
+
"loss": 1.5824,
|
9484 |
+
"step": 1349
|
9485 |
+
},
|
9486 |
+
{
|
9487 |
+
"epoch": 0.046079016980971076,
|
9488 |
+
"grad_norm": 1.088552474975586,
|
9489 |
+
"learning_rate": 1.314248714540589e-06,
|
9490 |
+
"loss": 2.1526,
|
9491 |
+
"step": 1350
|
9492 |
+
},
|
9493 |
+
{
|
9494 |
+
"epoch": 0.04611314958614216,
|
9495 |
+
"grad_norm": 0.8980138301849365,
|
9496 |
+
"learning_rate": 1.2785649236517038e-06,
|
9497 |
+
"loss": 1.5136,
|
9498 |
+
"step": 1351
|
9499 |
+
},
|
9500 |
+
{
|
9501 |
+
"epoch": 0.046147282191313255,
|
9502 |
+
"grad_norm": 0.8953339457511902,
|
9503 |
+
"learning_rate": 1.2433691404944547e-06,
|
9504 |
+
"loss": 1.9919,
|
9505 |
+
"step": 1352
|
9506 |
+
},
|
9507 |
+
{
|
9508 |
+
"epoch": 0.04618141479648434,
|
9509 |
+
"grad_norm": 1.4286813735961914,
|
9510 |
+
"learning_rate": 1.2086615390514478e-06,
|
9511 |
+
"loss": 2.2634,
|
9512 |
+
"step": 1353
|
9513 |
+
},
|
9514 |
+
{
|
9515 |
+
"epoch": 0.046215547401655434,
|
9516 |
+
"grad_norm": 0.7742242217063904,
|
9517 |
+
"learning_rate": 1.1744422908921193e-06,
|
9518 |
+
"loss": 1.3261,
|
9519 |
+
"step": 1354
|
9520 |
+
},
|
9521 |
+
{
|
9522 |
+
"epoch": 0.04624968000682652,
|
9523 |
+
"grad_norm": 1.356886863708496,
|
9524 |
+
"learning_rate": 1.1407115651717703e-06,
|
9525 |
+
"loss": 2.2072,
|
9526 |
+
"step": 1355
|
9527 |
+
},
|
9528 |
+
{
|
9529 |
+
"epoch": 0.04628381261199761,
|
9530 |
+
"grad_norm": 1.1008986234664917,
|
9531 |
+
"learning_rate": 1.1074695286308667e-06,
|
9532 |
+
"loss": 2.063,
|
9533 |
+
"step": 1356
|
9534 |
+
},
|
9535 |
+
{
|
9536 |
+
"epoch": 0.0463179452171687,
|
9537 |
+
"grad_norm": 0.9620717167854309,
|
9538 |
+
"learning_rate": 1.0747163455941178e-06,
|
9539 |
+
"loss": 1.9879,
|
9540 |
+
"step": 1357
|
9541 |
+
},
|
9542 |
+
{
|
9543 |
+
"epoch": 0.04635207782233979,
|
9544 |
+
"grad_norm": 0.983932614326477,
|
9545 |
+
"learning_rate": 1.0424521779696662e-06,
|
9546 |
+
"loss": 1.4559,
|
9547 |
+
"step": 1358
|
9548 |
+
},
|
9549 |
+
{
|
9550 |
+
"epoch": 0.04638621042751088,
|
9551 |
+
"grad_norm": 1.123429536819458,
|
9552 |
+
"learning_rate": 1.0106771852483431e-06,
|
9553 |
+
"loss": 2.5002,
|
9554 |
+
"step": 1359
|
9555 |
+
},
|
9556 |
+
{
|
9557 |
+
"epoch": 0.04642034303268197,
|
9558 |
+
"grad_norm": 0.876412570476532,
|
9559 |
+
"learning_rate": 9.793915245028596e-07,
|
9560 |
+
"loss": 1.8467,
|
9561 |
+
"step": 1360
|
9562 |
+
},
|
9563 |
+
{
|
9564 |
+
"epoch": 0.04645447563785306,
|
9565 |
+
"grad_norm": 1.1459770202636719,
|
9566 |
+
"learning_rate": 9.485953503869826e-07,
|
9567 |
+
"loss": 2.0324,
|
9568 |
+
"step": 1361
|
9569 |
+
},
|
9570 |
+
{
|
9571 |
+
"epoch": 0.04648860824302415,
|
9572 |
+
"grad_norm": 1.2978296279907227,
|
9573 |
+
"learning_rate": 9.182888151348712e-07,
|
9574 |
+
"loss": 1.5813,
|
9575 |
+
"step": 1362
|
9576 |
+
},
|
9577 |
+
{
|
9578 |
+
"epoch": 0.046522740848195236,
|
9579 |
+
"grad_norm": 0.9082944393157959,
|
9580 |
+
"learning_rate": 8.884720685601977e-07,
|
9581 |
+
"loss": 1.4904,
|
9582 |
+
"step": 1363
|
9583 |
+
},
|
9584 |
+
{
|
9585 |
+
"epoch": 0.04655687345336633,
|
9586 |
+
"grad_norm": 1.0392401218414307,
|
9587 |
+
"learning_rate": 8.591452580555159e-07,
|
9588 |
+
"loss": 2.0467,
|
9589 |
+
"step": 1364
|
9590 |
+
},
|
9591 |
+
{
|
9592 |
+
"epoch": 0.046591006058537415,
|
9593 |
+
"grad_norm": 1.3390921354293823,
|
9594 |
+
"learning_rate": 8.303085285914613e-07,
|
9595 |
+
"loss": 2.3281,
|
9596 |
+
"step": 1365
|
9597 |
+
},
|
9598 |
+
{
|
9599 |
+
"epoch": 0.04662513866370851,
|
9600 |
+
"grad_norm": 1.191910743713379,
|
9601 |
+
"learning_rate": 8.019620227160851e-07,
|
9602 |
+
"loss": 2.1539,
|
9603 |
+
"step": 1366
|
9604 |
+
},
|
9605 |
+
{
|
9606 |
+
"epoch": 0.046659271268879594,
|
9607 |
+
"grad_norm": 1.0370430946350098,
|
9608 |
+
"learning_rate": 7.741058805540991e-07,
|
9609 |
+
"loss": 1.8993,
|
9610 |
+
"step": 1367
|
9611 |
+
},
|
9612 |
+
{
|
9613 |
+
"epoch": 0.04669340387405069,
|
9614 |
+
"grad_norm": 1.097701907157898,
|
9615 |
+
"learning_rate": 7.467402398062206e-07,
|
9616 |
+
"loss": 1.907,
|
9617 |
+
"step": 1368
|
9618 |
+
},
|
9619 |
+
{
|
9620 |
+
"epoch": 0.04672753647922177,
|
9621 |
+
"grad_norm": 1.2112807035446167,
|
9622 |
+
"learning_rate": 7.19865235748507e-07,
|
9623 |
+
"loss": 1.9657,
|
9624 |
+
"step": 1369
|
9625 |
+
},
|
9626 |
+
{
|
9627 |
+
"epoch": 0.046761669084392866,
|
9628 |
+
"grad_norm": 0.861199676990509,
|
9629 |
+
"learning_rate": 6.934810012316217e-07,
|
9630 |
+
"loss": 1.5202,
|
9631 |
+
"step": 1370
|
9632 |
+
},
|
9633 |
+
{
|
9634 |
+
"epoch": 0.04679580168956395,
|
9635 |
+
"grad_norm": 1.2888405323028564,
|
9636 |
+
"learning_rate": 6.675876666802361e-07,
|
9637 |
+
"loss": 1.8849,
|
9638 |
+
"step": 1371
|
9639 |
+
},
|
9640 |
+
{
|
9641 |
+
"epoch": 0.046829934294735046,
|
9642 |
+
"grad_norm": 1.7592045068740845,
|
9643 |
+
"learning_rate": 6.421853600923622e-07,
|
9644 |
+
"loss": 1.6272,
|
9645 |
+
"step": 1372
|
9646 |
+
},
|
9647 |
+
{
|
9648 |
+
"epoch": 0.04686406689990614,
|
9649 |
+
"grad_norm": 1.055127501487732,
|
9650 |
+
"learning_rate": 6.172742070387206e-07,
|
9651 |
+
"loss": 2.213,
|
9652 |
+
"step": 1373
|
9653 |
+
},
|
9654 |
+
{
|
9655 |
+
"epoch": 0.046898199505077225,
|
9656 |
+
"grad_norm": 1.2827503681182861,
|
9657 |
+
"learning_rate": 5.928543306621293e-07,
|
9658 |
+
"loss": 1.9139,
|
9659 |
+
"step": 1374
|
9660 |
+
},
|
9661 |
+
{
|
9662 |
+
"epoch": 0.04693233211024832,
|
9663 |
+
"grad_norm": 0.9494105577468872,
|
9664 |
+
"learning_rate": 5.689258516768825e-07,
|
9665 |
+
"loss": 1.7016,
|
9666 |
+
"step": 1375
|
9667 |
+
},
|
9668 |
+
{
|
9669 |
+
"epoch": 0.046966464715419404,
|
9670 |
+
"grad_norm": 1.2929176092147827,
|
9671 |
+
"learning_rate": 5.454888883681731e-07,
|
9672 |
+
"loss": 2.2336,
|
9673 |
+
"step": 1376
|
9674 |
+
},
|
9675 |
+
{
|
9676 |
+
"epoch": 0.0470005973205905,
|
9677 |
+
"grad_norm": 1.0790014266967773,
|
9678 |
+
"learning_rate": 5.225435565914483e-07,
|
9679 |
+
"loss": 2.2177,
|
9680 |
+
"step": 1377
|
9681 |
+
},
|
9682 |
+
{
|
9683 |
+
"epoch": 0.04703472992576158,
|
9684 |
+
"grad_norm": 1.84797203540802,
|
9685 |
+
"learning_rate": 5.000899697719552e-07,
|
9686 |
+
"loss": 2.3655,
|
9687 |
+
"step": 1378
|
9688 |
+
},
|
9689 |
+
{
|
9690 |
+
"epoch": 0.047068862530932676,
|
9691 |
+
"grad_norm": 1.0962165594100952,
|
9692 |
+
"learning_rate": 4.781282389040409e-07,
|
9693 |
+
"loss": 2.1633,
|
9694 |
+
"step": 1379
|
9695 |
+
},
|
9696 |
+
{
|
9697 |
+
"epoch": 0.04710299513610376,
|
9698 |
+
"grad_norm": 1.2822717428207397,
|
9699 |
+
"learning_rate": 4.5665847255070835e-07,
|
9700 |
+
"loss": 1.9561,
|
9701 |
+
"step": 1380
|
9702 |
+
},
|
9703 |
+
{
|
9704 |
+
"epoch": 0.047137127741274855,
|
9705 |
+
"grad_norm": 0.8645843267440796,
|
9706 |
+
"learning_rate": 4.3568077684302823e-07,
|
9707 |
+
"loss": 1.1164,
|
9708 |
+
"step": 1381
|
9709 |
+
},
|
9710 |
+
{
|
9711 |
+
"epoch": 0.04717126034644594,
|
9712 |
+
"grad_norm": 1.2501182556152344,
|
9713 |
+
"learning_rate": 4.15195255479639e-07,
|
9714 |
+
"loss": 2.2892,
|
9715 |
+
"step": 1382
|
9716 |
+
},
|
9717 |
+
{
|
9718 |
+
"epoch": 0.047205392951617034,
|
9719 |
+
"grad_norm": 1.5437196493148804,
|
9720 |
+
"learning_rate": 3.9520200972621436e-07,
|
9721 |
+
"loss": 2.2557,
|
9722 |
+
"step": 1383
|
9723 |
+
},
|
9724 |
+
{
|
9725 |
+
"epoch": 0.04723952555678812,
|
9726 |
+
"grad_norm": 1.025200605392456,
|
9727 |
+
"learning_rate": 3.7570113841497447e-07,
|
9728 |
+
"loss": 1.8875,
|
9729 |
+
"step": 1384
|
9730 |
+
},
|
9731 |
+
{
|
9732 |
+
"epoch": 0.04727365816195921,
|
9733 |
+
"grad_norm": 1.0940229892730713,
|
9734 |
+
"learning_rate": 3.5669273794418644e-07,
|
9735 |
+
"loss": 2.3751,
|
9736 |
+
"step": 1385
|
9737 |
+
},
|
9738 |
+
{
|
9739 |
+
"epoch": 0.0473077907671303,
|
9740 |
+
"grad_norm": 1.1144181489944458,
|
9741 |
+
"learning_rate": 3.381769022776982e-07,
|
9742 |
+
"loss": 2.439,
|
9743 |
+
"step": 1386
|
9744 |
+
},
|
9745 |
+
{
|
9746 |
+
"epoch": 0.04734192337230139,
|
9747 |
+
"grad_norm": 1.037879228591919,
|
9748 |
+
"learning_rate": 3.2015372294450507e-07,
|
9749 |
+
"loss": 2.4142,
|
9750 |
+
"step": 1387
|
9751 |
+
},
|
9752 |
+
{
|
9753 |
+
"epoch": 0.04737605597747248,
|
9754 |
+
"grad_norm": 1.1448603868484497,
|
9755 |
+
"learning_rate": 3.0262328903820633e-07,
|
9756 |
+
"loss": 2.1559,
|
9757 |
+
"step": 1388
|
9758 |
+
},
|
9759 |
+
{
|
9760 |
+
"epoch": 0.04741018858264357,
|
9761 |
+
"grad_norm": 0.9735068082809448,
|
9762 |
+
"learning_rate": 2.855856872166607e-07,
|
9763 |
+
"loss": 1.8201,
|
9764 |
+
"step": 1389
|
9765 |
+
},
|
9766 |
+
{
|
9767 |
+
"epoch": 0.04744432118781466,
|
9768 |
+
"grad_norm": 1.095896601676941,
|
9769 |
+
"learning_rate": 2.6904100170150883e-07,
|
9770 |
+
"loss": 1.8875,
|
9771 |
+
"step": 1390
|
9772 |
+
},
|
9773 |
+
{
|
9774 |
+
"epoch": 0.04747845379298575,
|
9775 |
+
"grad_norm": 1.1156182289123535,
|
9776 |
+
"learning_rate": 2.5298931427777396e-07,
|
9777 |
+
"loss": 1.6502,
|
9778 |
+
"step": 1391
|
9779 |
+
},
|
9780 |
+
{
|
9781 |
+
"epoch": 0.047512586398156836,
|
9782 |
+
"grad_norm": 1.6896687746047974,
|
9783 |
+
"learning_rate": 2.3743070429345093e-07,
|
9784 |
+
"loss": 2.1804,
|
9785 |
+
"step": 1392
|
9786 |
+
},
|
9787 |
+
{
|
9788 |
+
"epoch": 0.04754671900332793,
|
9789 |
+
"grad_norm": 1.2774807214736938,
|
9790 |
+
"learning_rate": 2.2236524865910658e-07,
|
9791 |
+
"loss": 1.851,
|
9792 |
+
"step": 1393
|
9793 |
+
},
|
9794 |
+
{
|
9795 |
+
"epoch": 0.047580851608499015,
|
9796 |
+
"grad_norm": 0.9748452305793762,
|
9797 |
+
"learning_rate": 2.0779302184751325e-07,
|
9798 |
+
"loss": 1.6417,
|
9799 |
+
"step": 1394
|
9800 |
+
},
|
9801 |
+
{
|
9802 |
+
"epoch": 0.04761498421367011,
|
9803 |
+
"grad_norm": 1.2440422773361206,
|
9804 |
+
"learning_rate": 1.937140958932604e-07,
|
9805 |
+
"loss": 2.4976,
|
9806 |
+
"step": 1395
|
9807 |
+
},
|
9808 |
+
{
|
9809 |
+
"epoch": 0.0476491168188412,
|
9810 |
+
"grad_norm": 1.1557978391647339,
|
9811 |
+
"learning_rate": 1.8012854039244353e-07,
|
9812 |
+
"loss": 2.0424,
|
9813 |
+
"step": 1396
|
9814 |
+
},
|
9815 |
+
{
|
9816 |
+
"epoch": 0.04768324942401229,
|
9817 |
+
"grad_norm": 1.2029125690460205,
|
9818 |
+
"learning_rate": 1.670364225022758e-07,
|
9819 |
+
"loss": 1.5984,
|
9820 |
+
"step": 1397
|
9821 |
+
},
|
9822 |
+
{
|
9823 |
+
"epoch": 0.04771738202918338,
|
9824 |
+
"grad_norm": 1.150215744972229,
|
9825 |
+
"learning_rate": 1.5443780694074373e-07,
|
9826 |
+
"loss": 2.3163,
|
9827 |
+
"step": 1398
|
9828 |
+
},
|
9829 |
+
{
|
9830 |
+
"epoch": 0.04775151463435447,
|
9831 |
+
"grad_norm": 1.171433448791504,
|
9832 |
+
"learning_rate": 1.4233275598635186e-07,
|
9833 |
+
"loss": 1.9329,
|
9834 |
+
"step": 1399
|
9835 |
+
},
|
9836 |
+
{
|
9837 |
+
"epoch": 0.04778564723952556,
|
9838 |
+
"grad_norm": 1.04646897315979,
|
9839 |
+
"learning_rate": 1.307213294777676e-07,
|
9840 |
+
"loss": 1.9947,
|
9841 |
+
"step": 1400
|
9842 |
+
},
|
9843 |
+
{
|
9844 |
+
"epoch": 0.047819779844696646,
|
9845 |
+
"grad_norm": 1.10426926612854,
|
9846 |
+
"learning_rate": 1.196035848135102e-07,
|
9847 |
+
"loss": 2.1735,
|
9848 |
+
"step": 1401
|
9849 |
+
},
|
9850 |
+
{
|
9851 |
+
"epoch": 0.04785391244986774,
|
9852 |
+
"grad_norm": 1.1013439893722534,
|
9853 |
+
"learning_rate": 1.0897957695171768e-07,
|
9854 |
+
"loss": 1.9856,
|
9855 |
+
"step": 1402
|
9856 |
+
},
|
9857 |
+
{
|
9858 |
+
"epoch": 0.047888045055038825,
|
9859 |
+
"grad_norm": 1.0263570547103882,
|
9860 |
+
"learning_rate": 9.88493584098471e-08,
|
9861 |
+
"loss": 1.9754,
|
9862 |
+
"step": 1403
|
9863 |
+
},
|
9864 |
+
{
|
9865 |
+
"epoch": 0.04792217766020992,
|
9866 |
+
"grad_norm": 1.1793404817581177,
|
9867 |
+
"learning_rate": 8.921297926439698e-08,
|
9868 |
+
"loss": 2.2876,
|
9869 |
+
"step": 1404
|
9870 |
+
},
|
9871 |
+
{
|
9872 |
+
"epoch": 0.047956310265381004,
|
9873 |
+
"grad_norm": 1.1636236906051636,
|
9874 |
+
"learning_rate": 8.007048715068521e-08,
|
9875 |
+
"loss": 2.5088,
|
9876 |
+
"step": 1405
|
9877 |
+
},
|
9878 |
+
{
|
9879 |
+
"epoch": 0.0479904428705521,
|
9880 |
+
"grad_norm": 0.9703890681266785,
|
9881 |
+
"learning_rate": 7.142192726263818e-08,
|
9882 |
+
"loss": 1.7129,
|
9883 |
+
"step": 1406
|
9884 |
+
},
|
9885 |
+
{
|
9886 |
+
"epoch": 0.04802457547572318,
|
9887 |
+
"grad_norm": 0.963749349117279,
|
9888 |
+
"learning_rate": 6.326734235249099e-08,
|
9889 |
+
"loss": 2.3956,
|
9890 |
+
"step": 1407
|
9891 |
+
},
|
9892 |
+
{
|
9893 |
+
"epoch": 0.048058708080894276,
|
9894 |
+
"grad_norm": 1.2318871021270752,
|
9895 |
+
"learning_rate": 5.560677273064307e-08,
|
9896 |
+
"loss": 2.4345,
|
9897 |
+
"step": 1408
|
9898 |
+
},
|
9899 |
+
{
|
9900 |
+
"epoch": 0.04809284068606536,
|
9901 |
+
"grad_norm": 0.8953921794891357,
|
9902 |
+
"learning_rate": 4.8440256265447345e-08,
|
9903 |
+
"loss": 2.1513,
|
9904 |
+
"step": 1409
|
9905 |
+
},
|
9906 |
+
{
|
9907 |
+
"epoch": 0.048126973291236455,
|
9908 |
+
"grad_norm": 0.9674501419067383,
|
9909 |
+
"learning_rate": 4.17678283830103e-08,
|
9910 |
+
"loss": 1.796,
|
9911 |
+
"step": 1410
|
9912 |
+
},
|
9913 |
+
{
|
9914 |
+
"epoch": 0.04816110589640754,
|
9915 |
+
"grad_norm": 0.9000769257545471,
|
9916 |
+
"learning_rate": 3.5589522066992176e-08,
|
9917 |
+
"loss": 2.0396,
|
9918 |
+
"step": 1411
|
9919 |
+
},
|
9920 |
+
{
|
9921 |
+
"epoch": 0.048195238501578634,
|
9922 |
+
"grad_norm": 1.0316624641418457,
|
9923 |
+
"learning_rate": 2.9905367858507064e-08,
|
9924 |
+
"loss": 2.2712,
|
9925 |
+
"step": 1412
|
9926 |
+
},
|
9927 |
+
{
|
9928 |
+
"epoch": 0.04822937110674972,
|
9929 |
+
"grad_norm": 1.808516025543213,
|
9930 |
+
"learning_rate": 2.471539385592303e-08,
|
9931 |
+
"loss": 2.2795,
|
9932 |
+
"step": 1413
|
9933 |
+
},
|
9934 |
+
{
|
9935 |
+
"epoch": 0.04826350371192081,
|
9936 |
+
"grad_norm": 1.3263667821884155,
|
9937 |
+
"learning_rate": 2.0019625714740032e-08,
|
9938 |
+
"loss": 2.3736,
|
9939 |
+
"step": 1414
|
9940 |
+
},
|
9941 |
+
{
|
9942 |
+
"epoch": 0.0482976363170919,
|
9943 |
+
"grad_norm": 1.1833118200302124,
|
9944 |
+
"learning_rate": 1.5818086647445552e-08,
|
9945 |
+
"loss": 2.6563,
|
9946 |
+
"step": 1415
|
9947 |
+
},
|
9948 |
+
{
|
9949 |
+
"epoch": 0.04833176892226299,
|
9950 |
+
"grad_norm": 0.820840060710907,
|
9951 |
+
"learning_rate": 1.2110797423436904e-08,
|
9952 |
+
"loss": 1.5796,
|
9953 |
+
"step": 1416
|
9954 |
+
},
|
9955 |
+
{
|
9956 |
+
"epoch": 0.04836590152743408,
|
9957 |
+
"grad_norm": 1.4361692667007446,
|
9958 |
+
"learning_rate": 8.897776368865795e-09,
|
9959 |
+
"loss": 2.1282,
|
9960 |
+
"step": 1417
|
9961 |
+
},
|
9962 |
+
{
|
9963 |
+
"epoch": 0.04840003413260517,
|
9964 |
+
"grad_norm": 0.9566254615783691,
|
9965 |
+
"learning_rate": 6.179039366616124e-09,
|
9966 |
+
"loss": 2.0188,
|
9967 |
+
"step": 1418
|
9968 |
+
},
|
9969 |
+
{
|
9970 |
+
"epoch": 0.04843416673777626,
|
9971 |
+
"grad_norm": 1.096156120300293,
|
9972 |
+
"learning_rate": 3.954599856159646e-09,
|
9973 |
+
"loss": 1.7192,
|
9974 |
+
"step": 1419
|
9975 |
+
},
|
9976 |
+
{
|
9977 |
+
"epoch": 0.04846829934294735,
|
9978 |
+
"grad_norm": 1.2958521842956543,
|
9979 |
+
"learning_rate": 2.224468833522675e-09,
|
9980 |
+
"loss": 1.9972,
|
9981 |
+
"step": 1420
|
9982 |
+
},
|
9983 |
+
{
|
9984 |
+
"epoch": 0.048502431948118443,
|
9985 |
+
"grad_norm": 1.376560926437378,
|
9986 |
+
"learning_rate": 9.886548512305637e-10,
|
9987 |
+
"loss": 2.5841,
|
9988 |
+
"step": 1421
|
9989 |
+
},
|
9990 |
+
{
|
9991 |
+
"epoch": 0.04853656455328953,
|
9992 |
+
"grad_norm": 1.141074776649475,
|
9993 |
+
"learning_rate": 2.471640182633017e-10,
|
9994 |
+
"loss": 1.9833,
|
9995 |
+
"step": 1422
|
9996 |
+
},
|
9997 |
+
{
|
9998 |
+
"epoch": 0.04857069715846062,
|
9999 |
+
"grad_norm": 0.9498074650764465,
|
10000 |
+
"learning_rate": 0.0,
|
10001 |
+
"loss": 1.4883,
|
10002 |
+
"step": 1423
|
10003 |
}
|
10004 |
],
|
10005 |
"logging_steps": 1,
... (lines 10006-10013 unchanged, not shown)
10014 |         "should_evaluate": false,
10015 |         "should_log": false,
10016 |         "should_save": true,
10017 | +       "should_training_stop": true
10018 |       },
10019 |       "attributes": {}
10020 |     }
10021 |   },
10022 | +   "total_flos": 3.921291026576179e+16,
10023 |     "train_batch_size": 2,
10024 |     "trial_name": null,
10025 |     "trial_params": null
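
Note: the diff above traces the learning rate decaying to 0.0 at global step 1423, at which point "should_training_stop" flips to true and this final checkpoint is written. As a minimal sketch for reading that log back (not part of the checkpoint itself; it only assumes the last-checkpoint/trainer_state.json path from this commit), the history can be inspected like this:

    import json

    # Load the trainer state saved alongside the adapter weights.
    with open("last-checkpoint/trainer_state.json") as f:
        state = json.load(f)

    # Training rows carry a plain "loss" key; evaluation rows use
    # "eval_loss", "eval_runtime", etc., so this filter keeps only training steps.
    train_logs = [e for e in state["log_history"] if "loss" in e]

    print(state["global_step"])             # 1423
    print(train_logs[-1]["learning_rate"])  # 0.0
    print(train_logs[-1]["loss"])           # 1.4883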