Training in progress, step 1355, checkpoint
last-checkpoint/adapter_model.safetensors
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:86cf5d095361a4731d2eea2c0131ca044f5dcbbb6b544aa5ff5180c1191cc2b4
 size 15182728
last-checkpoint/optimizer.pt
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:31d258b1c3f0bf8214fc480e4607fea05f39d8e610d0ed88e736cd74241ea4be
 size 8076116
last-checkpoint/rng_state.pth
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:98135b59c94085902fbfb43dccf7491a3e4decaabe92bca4249483cc4eb6263e
 size 14244
last-checkpoint/scheduler.pt
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:d5077679a6cf126dd20c7ffc0d03806f2895bfc3955fe02f2ed3f8ec7635202f
 size 1064
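
The four binaries above are Git LFS pointer files: only the oid line (the SHA-256 of the stored object) changes in each, while the spec version and byte size are unchanged. Judging by the adapter_model.safetensors name and its ~15 MB size, this checkpoint appears to be a PEFT (LoRA-style) adapter; below is a minimal loading sketch under that assumption. The base model id is a placeholder, since this commit does not name it.

from transformers import AutoModelForCausalLM
from peft import PeftModel

# Assumption: this checkpoint is a PEFT adapter for some known base model.
base = AutoModelForCausalLM.from_pretrained("BASE_MODEL_ID")  # placeholder id, not stated in the commit
# "last-checkpoint" is the directory this commit updates; it holds
# adapter_model.safetensors plus the optimizer/scheduler/RNG state files.
model = PeftModel.from_pretrained(base, "last-checkpoint")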
last-checkpoint/trainer_state.json
CHANGED
@@ -1,9 +1,9 @@
 {
   "best_metric": null,
   "best_model_checkpoint": null,
-  "epoch": 0.
+  "epoch": 0.22931122017261804,
   "eval_steps": 339,
-  "global_step":
+  "global_step": 1355,
   "is_hyper_param_search": false,
   "is_local_process_zero": true,
   "is_world_process_zero": true,
@@ -7158,6 +7158,2372 @@
       "eval_samples_per_second": 50.141,
       "eval_steps_per_second": 25.081,
       "step": 1017
+    },
+    { "epoch": 0.1722795735319005, "grad_norm": 4.3395514488220215, "learning_rate": 2.9413302519580154e-05, "loss": 2.2645, "step": 1018 },
+    { "epoch": 0.1724488069047216, "grad_norm": 3.3445119857788086, "learning_rate": 2.9248043205703567e-05, "loss": 2.7109, "step": 1019 },
+    { "epoch": 0.17261804027754274, "grad_norm": 3.4657747745513916, "learning_rate": 2.9083169897275552e-05, "loss": 1.8132, "step": 1020 },
+    { "epoch": 0.17278727365036387, "grad_norm": 3.864420175552368, "learning_rate": 2.8918683493804787e-05, "loss": 2.4348, "step": 1021 },
+    { "epoch": 0.17295650702318496, "grad_norm": 4.884124279022217, "learning_rate": 2.875458489268904e-05, "loss": 2.755, "step": 1022 },
+    { "epoch": 0.1731257403960061, "grad_norm": 4.4030938148498535, "learning_rate": 2.8590874989210304e-05, "loss": 2.3002, "step": 1023 },
+    { "epoch": 0.17329497376882722, "grad_norm": 4.5884528160095215, "learning_rate": 2.842755467652999e-05, "loss": 2.8227, "step": 1024 },
+    { "epoch": 0.17346420714164834, "grad_norm": 4.153069496154785, "learning_rate": 2.8264624845683894e-05, "loss": 2.3139, "step": 1025 },
+    { "epoch": 0.17363344051446947, "grad_norm": 3.6545069217681885, "learning_rate": 2.810208638557753e-05, "loss": 2.8842, "step": 1026 },
+    { "epoch": 0.17380267388729057, "grad_norm": 4.083312034606934, "learning_rate": 2.7939940182981163e-05, "loss": 2.5912, "step": 1027 },
+    { "epoch": 0.1739719072601117, "grad_norm": 4.298991680145264, "learning_rate": 2.7778187122525022e-05, "loss": 2.9115, "step": 1028 },
+    { "epoch": 0.17414114063293282, "grad_norm": 4.7720947265625, "learning_rate": 2.7616828086694357e-05, "loss": 2.3173, "step": 1029 },
+    { "epoch": 0.17431037400575394, "grad_norm": 4.140947341918945, "learning_rate": 2.7455863955824813e-05, "loss": 2.4235, "step": 1030 },
+    { "epoch": 0.17447960737857504, "grad_norm": 4.7132182121276855, "learning_rate": 2.729529560809748e-05, "loss": 3.1484, "step": 1031 },
+    { "epoch": 0.17464884075139617, "grad_norm": 4.724729061126709, "learning_rate": 2.713512391953419e-05, "loss": 2.7419, "step": 1032 },
+    { "epoch": 0.1748180741242173, "grad_norm": 4.051872730255127, "learning_rate": 2.6975349763992676e-05, "loss": 2.6863, "step": 1033 },
+    { "epoch": 0.17498730749703842, "grad_norm": 3.0022430419921875, "learning_rate": 2.6815974013161792e-05, "loss": 1.9531, "step": 1034 },
+    { "epoch": 0.17515654086985954, "grad_norm": 3.270108699798584, "learning_rate": 2.665699753655684e-05, "loss": 1.9829, "step": 1035 },
+    { "epoch": 0.17532577424268064, "grad_norm": 5.108510494232178, "learning_rate": 2.649842120151481e-05, "loss": 2.8479, "step": 1036 },
+    { "epoch": 0.17549500761550177, "grad_norm": 3.7781755924224854, "learning_rate": 2.6340245873189506e-05, "loss": 2.3995, "step": 1037 },
+    { "epoch": 0.1756642409883229, "grad_norm": 4.705026626586914, "learning_rate": 2.6182472414547054e-05, "loss": 2.926, "step": 1038 },
+    { "epoch": 0.17583347436114402, "grad_norm": 3.566922664642334, "learning_rate": 2.6025101686360997e-05, "loss": 2.4755, "step": 1039 },
+    { "epoch": 0.17600270773396515, "grad_norm": 3.7031877040863037, "learning_rate": 2.5868134547207713e-05, "loss": 2.2235, "step": 1040 },
+    { "epoch": 0.17617194110678625, "grad_norm": 3.741117477416992, "learning_rate": 2.57115718534617e-05, "loss": 2.1753, "step": 1041 },
+    { "epoch": 0.17634117447960737, "grad_norm": 5.424650192260742, "learning_rate": 2.5555414459290928e-05, "loss": 2.8477, "step": 1042 },
+    { "epoch": 0.1765104078524285, "grad_norm": 4.087668418884277, "learning_rate": 2.5399663216652046e-05, "loss": 2.015, "step": 1043 },
+    { "epoch": 0.17667964122524962, "grad_norm": 3.9556102752685547, "learning_rate": 2.524431897528595e-05, "loss": 2.5602, "step": 1044 },
+    { "epoch": 0.17684887459807075, "grad_norm": 3.6782662868499756, "learning_rate": 2.5089382582712994e-05, "loss": 2.3212, "step": 1045 },
+    { "epoch": 0.17701810797089185, "grad_norm": 4.841695308685303, "learning_rate": 2.4934854884228432e-05, "loss": 2.9354, "step": 1046 },
+    { "epoch": 0.17718734134371297, "grad_norm": 3.525268793106079, "learning_rate": 2.4780736722897714e-05, "loss": 2.0289, "step": 1047 },
+    { "epoch": 0.1773565747165341, "grad_norm": 3.9872353076934814, "learning_rate": 2.462702893955202e-05, "loss": 2.4513, "step": 1048 },
+    { "epoch": 0.17752580808935522, "grad_norm": 3.6817212104797363, "learning_rate": 2.4473732372783588e-05, "loss": 1.967, "step": 1049 },
+    { "epoch": 0.17769504146217635, "grad_norm": 4.60300874710083, "learning_rate": 2.4320847858941164e-05, "loss": 2.4409, "step": 1050 },
+    { "epoch": 0.17786427483499745, "grad_norm": 4.4125776290893555, "learning_rate": 2.4168376232125467e-05, "loss": 2.9286, "step": 1051 },
+    { "epoch": 0.17803350820781857, "grad_norm": 3.7913129329681396, "learning_rate": 2.4016318324184495e-05, "loss": 2.7111, "step": 1052 },
+    { "epoch": 0.1782027415806397, "grad_norm": 3.4860150814056396, "learning_rate": 2.38646749647092e-05, "loss": 2.1972, "step": 1053 },
+    { "epoch": 0.17837197495346083, "grad_norm": 3.3643290996551514, "learning_rate": 2.371344698102883e-05, "loss": 2.1093, "step": 1054 },
+    { "epoch": 0.17854120832628195, "grad_norm": 4.35898494720459, "learning_rate": 2.356263519820647e-05, "loss": 2.7809, "step": 1055 },
+    { "epoch": 0.17871044169910305, "grad_norm": 3.5371789932250977, "learning_rate": 2.3412240439034428e-05, "loss": 1.9207, "step": 1056 },
+    { "epoch": 0.17887967507192418, "grad_norm": 4.014754295349121, "learning_rate": 2.3262263524029924e-05, "loss": 2.5513, "step": 1057 },
+    { "epoch": 0.1790489084447453, "grad_norm": 3.7930805683135986, "learning_rate": 2.3112705271430468e-05, "loss": 1.8225, "step": 1058 },
+    { "epoch": 0.17921814181756643, "grad_norm": 4.399813652038574, "learning_rate": 2.2963566497189528e-05, "loss": 2.9431, "step": 1059 },
+    { "epoch": 0.17938737519038755, "grad_norm": 3.793945550918579, "learning_rate": 2.2814848014971858e-05, "loss": 2.4629, "step": 1060 },
+    { "epoch": 0.17955660856320865, "grad_norm": 4.213638782501221, "learning_rate": 2.2666550636149354e-05, "loss": 3.0822, "step": 1061 },
+    { "epoch": 0.17972584193602978, "grad_norm": 4.18215274810791, "learning_rate": 2.2518675169796343e-05, "loss": 2.214, "step": 1062 },
+    { "epoch": 0.1798950753088509, "grad_norm": 3.336693525314331, "learning_rate": 2.23712224226854e-05, "loss": 2.0548, "step": 1063 },
+    { "epoch": 0.18006430868167203, "grad_norm": 4.232928276062012, "learning_rate": 2.2224193199282838e-05, "loss": 2.7847, "step": 1064 },
+    { "epoch": 0.18023354205449316, "grad_norm": 3.835306406021118, "learning_rate": 2.2077588301744233e-05, "loss": 2.7982, "step": 1065 },
+    { "epoch": 0.18040277542731425, "grad_norm": 4.385756015777588, "learning_rate": 2.1931408529910246e-05, "loss": 2.336, "step": 1066 },
+    { "epoch": 0.18057200880013538, "grad_norm": 4.008677005767822, "learning_rate": 2.1785654681302116e-05, "loss": 2.7041, "step": 1067 },
+    { "epoch": 0.1807412421729565, "grad_norm": 4.5425872802734375, "learning_rate": 2.1640327551117336e-05, "loss": 2.5116, "step": 1068 },
+    { "epoch": 0.18091047554577763, "grad_norm": 4.540824890136719, "learning_rate": 2.1495427932225388e-05, "loss": 2.4235, "step": 1069 },
+    { "epoch": 0.18107970891859876, "grad_norm": 5.376448154449463, "learning_rate": 2.1350956615163254e-05, "loss": 2.6334, "step": 1070 },
+    { "epoch": 0.18124894229141986, "grad_norm": 3.7366974353790283, "learning_rate": 2.1206914388131283e-05, "loss": 2.7468, "step": 1071 },
+    { "epoch": 0.18141817566424098, "grad_norm": 4.922610282897949, "learning_rate": 2.1063302036988798e-05, "loss": 2.7535, "step": 1072 },
+    { "epoch": 0.1815874090370621, "grad_norm": 3.4199185371398926, "learning_rate": 2.0920120345249838e-05, "loss": 1.8505, "step": 1073 },
+    { "epoch": 0.18175664240988323, "grad_norm": 3.912236452102661, "learning_rate": 2.0777370094078797e-05, "loss": 2.2763, "step": 1074 },
+    { "epoch": 0.18192587578270436, "grad_norm": 5.081204891204834, "learning_rate": 2.063505206228632e-05, "loss": 2.4415, "step": 1075 },
+    { "epoch": 0.18209510915552546, "grad_norm": 3.8375396728515625, "learning_rate": 2.0493167026324922e-05, "loss": 2.4481, "step": 1076 },
+    { "epoch": 0.18226434252834658, "grad_norm": 3.699528455734253, "learning_rate": 2.0351715760284828e-05, "loss": 2.1769, "step": 1077 },
+    { "epoch": 0.1824335759011677, "grad_norm": 4.060338020324707, "learning_rate": 2.0210699035889725e-05, "loss": 2.5054, "step": 1078 },
+    { "epoch": 0.18260280927398884, "grad_norm": 4.67859411239624, "learning_rate": 2.0070117622492457e-05, "loss": 2.7125, "step": 1079 },
+    { "epoch": 0.18277204264680996, "grad_norm": 3.7729649543762207, "learning_rate": 1.992997228707103e-05, "loss": 1.9954, "step": 1080 },
+    { "epoch": 0.18294127601963106, "grad_norm": 3.707524299621582, "learning_rate": 1.9790263794224263e-05, "loss": 2.0318, "step": 1081 },
+    { "epoch": 0.1831105093924522, "grad_norm": 4.265018939971924, "learning_rate": 1.96509929061677e-05, "loss": 2.3878, "step": 1082 },
+    { "epoch": 0.1832797427652733, "grad_norm": 4.8140549659729, "learning_rate": 1.951216038272934e-05, "loss": 2.616, "step": 1083 },
+    { "epoch": 0.18344897613809444, "grad_norm": 4.044152736663818, "learning_rate": 1.9373766981345675e-05, "loss": 2.6408, "step": 1084 },
+    { "epoch": 0.18361820951091556, "grad_norm": 4.757798194885254, "learning_rate": 1.923581345705736e-05, "loss": 2.4969, "step": 1085 },
+    { "epoch": 0.18378744288373666, "grad_norm": 4.599098205566406, "learning_rate": 1.9098300562505266e-05, "loss": 2.3713, "step": 1086 },
+    { "epoch": 0.1839566762565578, "grad_norm": 3.1188430786132812, "learning_rate": 1.8961229047926276e-05, "loss": 1.9863, "step": 1087 },
+    { "epoch": 0.18412590962937891, "grad_norm": 3.8549416065216064, "learning_rate": 1.8824599661149144e-05, "loss": 2.3418, "step": 1088 },
+    { "epoch": 0.18429514300220004, "grad_norm": 3.688460111618042, "learning_rate": 1.8688413147590566e-05, "loss": 2.267, "step": 1089 },
+    { "epoch": 0.18446437637502117, "grad_norm": 4.4874701499938965, "learning_rate": 1.8552670250251003e-05, "loss": 2.2783, "step": 1090 },
+    { "epoch": 0.18463360974784226, "grad_norm": 3.745515823364258, "learning_rate": 1.8417371709710673e-05, "loss": 2.4538, "step": 1091 },
+    { "epoch": 0.1848028431206634, "grad_norm": 3.9514317512512207, "learning_rate": 1.8282518264125414e-05, "loss": 2.8781, "step": 1092 },
+    { "epoch": 0.18497207649348452, "grad_norm": 3.164168357849121, "learning_rate": 1.8148110649222826e-05, "loss": 1.9176, "step": 1093 },
+    { "epoch": 0.18514130986630564, "grad_norm": 5.821022033691406, "learning_rate": 1.8014149598298112e-05, "loss": 2.8274, "step": 1094 },
+    { "epoch": 0.18531054323912677, "grad_norm": 4.494678497314453, "learning_rate": 1.788063584221017e-05, "loss": 2.4276, "step": 1095 },
+    { "epoch": 0.18547977661194787, "grad_norm": 3.5913803577423096, "learning_rate": 1.774757010937754e-05, "loss": 2.1764, "step": 1096 },
+    { "epoch": 0.185649009984769, "grad_norm": 4.672360897064209, "learning_rate": 1.7614953125774436e-05, "loss": 2.8948, "step": 1097 },
+    { "epoch": 0.18581824335759012, "grad_norm": 4.718415260314941, "learning_rate": 1.7482785614926835e-05, "loss": 3.0947, "step": 1098 },
+    { "epoch": 0.18598747673041124, "grad_norm": 4.332279205322266, "learning_rate": 1.73510682979085e-05, "loss": 2.0206, "step": 1099 },
+    { "epoch": 0.18615671010323237, "grad_norm": 3.6991453170776367, "learning_rate": 1.7219801893337074e-05, "loss": 2.735, "step": 1100 },
+    { "epoch": 0.18632594347605347, "grad_norm": 5.5317888259887695, "learning_rate": 1.7088987117370038e-05, "loss": 3.4384, "step": 1101 },
+    { "epoch": 0.1864951768488746, "grad_norm": 4.364567279815674, "learning_rate": 1.6958624683700973e-05, "loss": 2.8597, "step": 1102 },
+    { "epoch": 0.18666441022169572, "grad_norm": 3.692396879196167, "learning_rate": 1.682871530355561e-05, "loss": 2.4211, "step": 1103 },
+    { "epoch": 0.18683364359451685, "grad_norm": 3.827686309814453, "learning_rate": 1.669925968568785e-05, "loss": 2.6772, "step": 1104 },
+    { "epoch": 0.18700287696733797, "grad_norm": 3.8148603439331055, "learning_rate": 1.6570258536376083e-05, "loss": 2.6066, "step": 1105 },
+    { "epoch": 0.18717211034015907, "grad_norm": 3.7202560901641846, "learning_rate": 1.6441712559419097e-05, "loss": 1.9969, "step": 1106 },
+    { "epoch": 0.1873413437129802, "grad_norm": 5.512956619262695, "learning_rate": 1.6313622456132503e-05, "loss": 2.897, "step": 1107 },
+    { "epoch": 0.18751057708580132, "grad_norm": 4.357888221740723, "learning_rate": 1.6185988925344665e-05, "loss": 2.551, "step": 1108 },
+    { "epoch": 0.18767981045862245, "grad_norm": 4.925058841705322, "learning_rate": 1.6058812663393062e-05, "loss": 2.3116, "step": 1109 },
+    { "epoch": 0.18784904383144357, "grad_norm": 3.9438397884368896, "learning_rate": 1.5932094364120453e-05, "loss": 2.3844, "step": 1110 },
+    { "epoch": 0.18801827720426467, "grad_norm": 4.715054512023926, "learning_rate": 1.580583471887097e-05, "loss": 2.3499, "step": 1111 },
+    { "epoch": 0.1881875105770858, "grad_norm": 4.1592607498168945, "learning_rate": 1.5680034416486512e-05, "loss": 2.1975, "step": 1112 },
+    { "epoch": 0.18835674394990692, "grad_norm": 3.9723381996154785, "learning_rate": 1.5554694143302907e-05, "loss": 2.4127, "step": 1113 },
+    { "epoch": 0.18852597732272805, "grad_norm": 4.235254287719727, "learning_rate": 1.5429814583146173e-05, "loss": 2.3242, "step": 1114 },
+    { "epoch": 0.18869521069554918, "grad_norm": 5.990997791290283, "learning_rate": 1.5305396417328756e-05, "loss": 2.6977, "step": 1115 },
+    { "epoch": 0.18886444406837027, "grad_norm": 3.7243008613586426, "learning_rate": 1.518144032464589e-05, "loss": 2.1743, "step": 1116 },
+    { "epoch": 0.1890336774411914, "grad_norm": 3.2548139095306396, "learning_rate": 1.5057946981371828e-05, "loss": 2.4581, "step": 1117 },
+    { "epoch": 0.18920291081401253, "grad_norm": 4.162649154663086, "learning_rate": 1.4934917061256193e-05, "loss": 2.9046, "step": 1118 },
+    { "epoch": 0.18937214418683365, "grad_norm": 4.94911527633667, "learning_rate": 1.4812351235520282e-05, "loss": 3.3423, "step": 1119 },
+    { "epoch": 0.18954137755965475, "grad_norm": 4.10012149810791, "learning_rate": 1.4690250172853348e-05, "loss": 2.3461, "step": 1120 },
+    { "epoch": 0.18971061093247588, "grad_norm": 4.512474060058594, "learning_rate": 1.4568614539409098e-05, "loss": 2.8093, "step": 1121 },
+    { "epoch": 0.189879844305297, "grad_norm": 3.8283495903015137, "learning_rate": 1.444744499880193e-05, "loss": 1.9478, "step": 1122 },
+    { "epoch": 0.19004907767811813, "grad_norm": 5.450031280517578, "learning_rate": 1.432674221210335e-05, "loss": 2.9874, "step": 1123 },
+    { "epoch": 0.19021831105093925, "grad_norm": 3.768235921859741, "learning_rate": 1.4206506837838352e-05, "loss": 2.4975, "step": 1124 },
+    { "epoch": 0.19038754442376035, "grad_norm": 3.930394172668457, "learning_rate": 1.4086739531981885e-05, "loss": 2.731, "step": 1125 },
+    { "epoch": 0.19055677779658148, "grad_norm": 4.030210971832275, "learning_rate": 1.396744094795519e-05, "loss": 1.9317, "step": 1126 },
+    { "epoch": 0.1907260111694026, "grad_norm": 3.4844772815704346, "learning_rate": 1.3848611736622297e-05, "loss": 2.7222, "step": 1127 },
+    { "epoch": 0.19089524454222373, "grad_norm": 3.6188392639160156, "learning_rate": 1.3730252546286481e-05, "loss": 2.3593, "step": 1128 },
+    { "epoch": 0.19106447791504486, "grad_norm": 4.572890281677246, "learning_rate": 1.3612364022686619e-05, "loss": 2.9578, "step": 1129 },
+    { "epoch": 0.19123371128786595, "grad_norm": 3.8525936603546143, "learning_rate": 1.3494946808993803e-05, "loss": 2.2877, "step": 1130 },
+    { "epoch": 0.19140294466068708, "grad_norm": 3.545377731323242, "learning_rate": 1.3378001545807795e-05, "loss": 2.4046, "step": 1131 },
+    { "epoch": 0.1915721780335082, "grad_norm": 5.4002203941345215, "learning_rate": 1.3261528871153417e-05, "loss": 2.8953, "step": 1132 },
+    { "epoch": 0.19174141140632933, "grad_norm": 3.8969218730926514, "learning_rate": 1.3145529420477276e-05, "loss": 2.0981, "step": 1133 },
+    { "epoch": 0.19191064477915046, "grad_norm": 3.4931914806365967, "learning_rate": 1.3030003826644077e-05, "loss": 2.2558, "step": 1134 },
+    { "epoch": 0.19207987815197156, "grad_norm": 4.584532260894775, "learning_rate": 1.2914952719933371e-05, "loss": 2.9124, "step": 1135 },
+    { "epoch": 0.19224911152479268, "grad_norm": 3.692653179168701, "learning_rate": 1.2800376728035956e-05, "loss": 2.477, "step": 1136 },
+    { "epoch": 0.1924183448976138, "grad_norm": 3.949140787124634, "learning_rate": 1.2686276476050596e-05, "loss": 2.4982, "step": 1137 },
+    { "epoch": 0.19258757827043493, "grad_norm": 4.792884826660156, "learning_rate": 1.2572652586480416e-05, "loss": 2.7599, "step": 1138 },
+    { "epoch": 0.19275681164325606, "grad_norm": 3.8859872817993164, "learning_rate": 1.2459505679229722e-05, "loss": 2.6972, "step": 1139 },
+    { "epoch": 0.19292604501607716, "grad_norm": 4.138924598693848, "learning_rate": 1.234683637160048e-05, "loss": 2.0457, "step": 1140 },
+    { "epoch": 0.19309527838889828, "grad_norm": 3.749267578125, "learning_rate": 1.2234645278289037e-05, "loss": 2.3956, "step": 1141 },
+    { "epoch": 0.1932645117617194, "grad_norm": 3.974944829940796, "learning_rate": 1.2122933011382631e-05, "loss": 2.3226, "step": 1142 },
+    { "epoch": 0.19343374513454054, "grad_norm": 3.411405086517334, "learning_rate": 1.20117001803562e-05, "loss": 2.0167, "step": 1143 },
+    { "epoch": 0.19360297850736166, "grad_norm": 3.2423627376556396, "learning_rate": 1.1900947392068995e-05, "loss": 1.9001, "step": 1144 },
+    { "epoch": 0.19377221188018276, "grad_norm": 4.742347240447998, "learning_rate": 1.1790675250761263e-05, "loss": 2.7298, "step": 1145 },
+    { "epoch": 0.19394144525300389, "grad_norm": 4.177172660827637, "learning_rate": 1.1680884358050969e-05, "loss": 2.4894, "step": 1146 },
+    { "epoch": 0.194110678625825, "grad_norm": 4.569289207458496, "learning_rate": 1.157157531293045e-05, "loss": 2.9185, "step": 1147 },
+    { "epoch": 0.19427991199864614, "grad_norm": 3.3422698974609375, "learning_rate": 1.1462748711763272e-05, "loss": 2.5378, "step": 1148 },
+    { "epoch": 0.19444914537146726, "grad_norm": 4.30674934387207, "learning_rate": 1.1354405148280855e-05, "loss": 2.6205, "step": 1149 },
+    { "epoch": 0.19461837874428836, "grad_norm": 4.322670936584473, "learning_rate": 1.124654521357934e-05, "loss": 2.8588, "step": 1150 },
+    { "epoch": 0.1947876121171095, "grad_norm": 3.0093047618865967, "learning_rate": 1.113916949611622e-05, "loss": 1.8716, "step": 1151 },
+    { "epoch": 0.1949568454899306, "grad_norm": 4.273172855377197, "learning_rate": 1.1032278581707267e-05, "loss": 2.5739, "step": 1152 },
+    { "epoch": 0.19512607886275174, "grad_norm": 5.307528018951416, "learning_rate": 1.0925873053523305e-05, "loss": 2.3395, "step": 1153 },
+    { "epoch": 0.19529531223557287, "grad_norm": 4.219779968261719, "learning_rate": 1.0819953492087009e-05, "loss": 2.6265, "step": 1154 },
+    { "epoch": 0.19546454560839396, "grad_norm": 5.497633934020996, "learning_rate": 1.0714520475269652e-05, "loss": 2.4528, "step": 1155 },
+    { "epoch": 0.1956337789812151, "grad_norm": 4.362338066101074, "learning_rate": 1.0609574578288161e-05, "loss": 2.7484, "step": 1156 },
+    { "epoch": 0.19580301235403622, "grad_norm": 3.573737859725952, "learning_rate": 1.050511637370175e-05, "loss": 2.5373, "step": 1157 },
+    { "epoch": 0.19597224572685734, "grad_norm": 4.227601528167725, "learning_rate": 1.040114643140897e-05, "loss": 2.7768, "step": 1158 },
+    { "epoch": 0.19614147909967847, "grad_norm": 3.399428129196167, "learning_rate": 1.0297665318644522e-05, "loss": 2.2801, "step": 1159 },
+    { "epoch": 0.19631071247249957, "grad_norm": 4.715485572814941, "learning_rate": 1.0194673599976134e-05, "loss": 3.3442, "step": 1160 },
+    { "epoch": 0.1964799458453207, "grad_norm": 4.800102233886719, "learning_rate": 1.0092171837301556e-05, "loss": 2.2439, "step": 1161 },
+    { "epoch": 0.19664917921814182, "grad_norm": 4.825545310974121, "learning_rate": 9.990160589845444e-06, "loss": 3.1834, "step": 1162 },
+    { "epoch": 0.19681841259096294, "grad_norm": 4.520576477050781, "learning_rate": 9.888640414156336e-06, "loss": 2.7257, "step": 1163 },
+    { "epoch": 0.19698764596378407, "grad_norm": 3.46256947517395, "learning_rate": 9.787611864103608e-06, "loss": 2.2377, "step": 1164 },
+    { "epoch": 0.19715687933660517, "grad_norm": 3.479843854904175, "learning_rate": 9.687075490874376e-06, "loss": 2.5336, "step": 1165 },
+    { "epoch": 0.1973261127094263, "grad_norm": 4.506025314331055, "learning_rate": 9.587031842970651e-06, "loss": 2.7586, "step": 1166 },
+    { "epoch": 0.19749534608224742, "grad_norm": 5.445701599121094, "learning_rate": 9.487481466206206e-06, "loss": 2.7683, "step": 1167 },
+    { "epoch": 0.19766457945506855, "grad_norm": 3.872097969055176, "learning_rate": 9.388424903703673e-06, "loss": 2.1866, "step": 1168 },
+    { "epoch": 0.19783381282788967, "grad_norm": 4.747255325317383, "learning_rate": 9.289862695891505e-06, "loss": 2.5756, "step": 1169 },
+    { "epoch": 0.19800304620071077, "grad_norm": 3.8052592277526855, "learning_rate": 9.191795380501134e-06, "loss": 2.923, "step": 1170 },
+    { "epoch": 0.1981722795735319, "grad_norm": 4.233860015869141, "learning_rate": 9.094223492563936e-06, "loss": 3.0105, "step": 1171 },
+    { "epoch": 0.19834151294635302, "grad_norm": 3.8127241134643555, "learning_rate": 8.997147564408393e-06, "loss": 2.4869, "step": 1172 },
+    { "epoch": 0.19851074631917415, "grad_norm": 4.697332859039307, "learning_rate": 8.900568125657138e-06, "loss": 2.5324, "step": 1173 },
+    { "epoch": 0.19867997969199527, "grad_norm": 4.7639384269714355, "learning_rate": 8.80448570322403e-06, "loss": 3.0871, "step": 1174 },
+    { "epoch": 0.19884921306481637, "grad_norm": 3.3893449306488037, "learning_rate": 8.708900821311405e-06, "loss": 2.6548, "step": 1175 },
+    { "epoch": 0.1990184464376375, "grad_norm": 3.3379271030426025, "learning_rate": 8.613814001407095e-06, "loss": 1.7754, "step": 1176 },
+    { "epoch": 0.19918767981045862, "grad_norm": 5.333044052124023, "learning_rate": 8.519225762281658e-06, "loss": 3.212, "step": 1177 },
+    { "epoch": 0.19935691318327975, "grad_norm": 4.818188667297363, "learning_rate": 8.425136619985453e-06, "loss": 2.7269, "step": 1178 },
+    { "epoch": 0.19952614655610088, "grad_norm": 4.242152690887451, "learning_rate": 8.33154708784597e-06, "loss": 2.8795, "step": 1179 },
+    { "epoch": 0.19969537992892197, "grad_norm": 3.871042251586914, "learning_rate": 8.238457676464872e-06, "loss": 2.4768, "step": 1180 },
+    { "epoch": 0.1998646133017431, "grad_norm": 4.575113296508789, "learning_rate": 8.145868893715335e-06, "loss": 2.9311, "step": 1181 },
+    { "epoch": 0.20003384667456423, "grad_norm": 3.455521583557129, "learning_rate": 8.053781244739245e-06, "loss": 1.8744, "step": 1182 },
+    { "epoch": 0.20020308004738535, "grad_norm": 3.984724521636963, "learning_rate": 7.962195231944336e-06, "loss": 2.321, "step": 1183 },
+    { "epoch": 0.20037231342020648, "grad_norm": 3.9881107807159424, "learning_rate": 7.871111355001625e-06, "loss": 2.6127, "step": 1184 },
+    { "epoch": 0.20054154679302758, "grad_norm": 3.8315985202789307, "learning_rate": 7.780530110842565e-06, "loss": 2.4565, "step": 1185 },
+    { "epoch": 0.2007107801658487, "grad_norm": 4.196242809295654, "learning_rate": 7.690451993656378e-06, "loss": 2.8104, "step": 1186 },
+    { "epoch": 0.20088001353866983, "grad_norm": 3.9669601917266846, "learning_rate": 7.6008774948873155e-06, "loss": 2.5335, "step": 1187 },
+    { "epoch": 0.20104924691149095, "grad_norm": 4.353307247161865, "learning_rate": 7.511807103232038e-06, "loss": 2.4607, "step": 1188 },
+    { "epoch": 0.20121848028431208, "grad_norm": 5.587460517883301, "learning_rate": 7.423241304636919e-06, "loss": 2.703, "step": 1189 },
+    { "epoch": 0.20138771365713318, "grad_norm": 3.306898355484009, "learning_rate": 7.335180582295386e-06, "loss": 2.4389, "step": 1190 },
+    { "epoch": 0.2015569470299543, "grad_norm": 4.30010986328125, "learning_rate": 7.24762541664532e-06, "loss": 3.0216, "step": 1191 },
+    { "epoch": 0.20172618040277543, "grad_norm": 4.448509216308594, "learning_rate": 7.160576285366349e-06, "loss": 2.5429, "step": 1192 },
+    { "epoch": 0.20189541377559655, "grad_norm": 3.841930389404297, "learning_rate": 7.074033663377355e-06, "loss": 1.8933, "step": 1193 },
+    { "epoch": 0.20206464714841768, "grad_norm": 4.539728164672852, "learning_rate": 6.987998022833808e-06, "loss": 2.8734, "step": 1194 },
+    { "epoch": 0.20223388052123878, "grad_norm": 3.8912057876586914, "learning_rate": 6.902469833125236e-06, "loss": 2.5782, "step": 1195 },
+    { "epoch": 0.2024031138940599, "grad_norm": 4.162531852722168, "learning_rate": 6.817449560872591e-06, "loss": 2.62, "step": 1196 },
+    { "epoch": 0.20257234726688103, "grad_norm": 4.5268049240112305, "learning_rate": 6.732937669925765e-06, "loss": 2.6603, "step": 1197 },
+    { "epoch": 0.20274158063970216, "grad_norm": 4.033636093139648, "learning_rate": 6.648934621361091e-06, "loss": 2.3154, "step": 1198 },
+    { "epoch": 0.20291081401252328, "grad_norm": 4.054227352142334, "learning_rate": 6.565440873478723e-06, "loss": 2.1888, "step": 1199 },
+    { "epoch": 0.20308004738534438, "grad_norm": 4.195720195770264, "learning_rate": 6.482456881800247e-06, "loss": 2.6688, "step": 1200 },
+    { "epoch": 0.2032492807581655, "grad_norm": 4.707369804382324, "learning_rate": 6.399983099066076e-06, "loss": 2.3394, "step": 1201 },
+    { "epoch": 0.20341851413098663, "grad_norm": 3.888090133666992, "learning_rate": 6.318019975233136e-06, "loss": 2.6881, "step": 1202 },
+    { "epoch": 0.20358774750380776, "grad_norm": 4.6143646240234375, "learning_rate": 6.236567957472217e-06, "loss": 2.374, "step": 1203 },
+    { "epoch": 0.20375698087662888, "grad_norm": 3.9909422397613525, "learning_rate": 6.1556274901657385e-06, "loss": 2.5502, "step": 1204 },
+    { "epoch": 0.20392621424944998, "grad_norm": 3.8268110752105713, "learning_rate": 6.075199014905153e-06, "loss": 2.4289, "step": 1205 },
+    { "epoch": 0.2040954476222711, "grad_norm": 4.493071556091309, "learning_rate": 5.995282970488647e-06, "loss": 2.7838, "step": 1206 },
+    { "epoch": 0.20426468099509223, "grad_norm": 4.041876316070557, "learning_rate": 5.9158797929186995e-06, "loss": 2.4366, "step": 1207 },
+    { "epoch": 0.20443391436791336, "grad_norm": 3.6854562759399414, "learning_rate": 5.836989915399727e-06, "loss": 2.4051, "step": 1208 },
+    { "epoch": 0.20460314774073446, "grad_norm": 4.58350133895874, "learning_rate": 5.758613768335685e-06, "loss": 2.1039, "step": 1209 },
+    { "epoch": 0.20477238111355558, "grad_norm": 4.839781761169434, "learning_rate": 5.680751779327742e-06, "loss": 2.7066, "step": 1210 },
+    { "epoch": 0.2049416144863767, "grad_norm": 3.7150022983551025, "learning_rate": 5.6034043731719385e-06, "loss": 2.2101, "step": 1211 },
+    { "epoch": 0.20511084785919784, "grad_norm": 4.2822136878967285, "learning_rate": 5.52657197185692e-06, "loss": 2.8929, "step": 1212 },
+    { "epoch": 0.20528008123201896, "grad_norm": 4.615571975708008, "learning_rate": 5.450254994561543e-06, "loss": 2.9179, "step": 1213 },
+    { "epoch": 0.20544931460484006, "grad_norm": 3.698920726776123, "learning_rate": 5.3744538576526505e-06, "loss": 2.0971, "step": 1214 },
+    { "epoch": 0.2056185479776612, "grad_norm": 4.383177757263184, "learning_rate": 5.2991689746827885e-06, "loss": 1.8414, "step": 1215 },
+    { "epoch": 0.2057877813504823, "grad_norm": 3.8897697925567627, "learning_rate": 5.224400756387948e-06, "loss": 2.4696, "step": 1216 },
+    { "epoch": 0.20595701472330344, "grad_norm": 3.8492186069488525, "learning_rate": 5.150149610685318e-06, "loss": 2.4766, "step": 1217 },
+    { "epoch": 0.20612624809612456, "grad_norm": 4.1576828956604, "learning_rate": 5.076415942671098e-06, "loss": 3.1899, "step": 1218 },
+    { "epoch": 0.20629548146894566, "grad_norm": 3.8148016929626465, "learning_rate": 5.003200154618193e-06, "loss": 2.4443, "step": 1219 },
+    { "epoch": 0.2064647148417668, "grad_norm": 5.0688700675964355, "learning_rate": 4.9305026459741224e-06, "loss": 2.8179, "step": 1220 },
+    { "epoch": 0.20663394821458791, "grad_norm": 4.496060848236084, "learning_rate": 4.8583238133588e-06, "loss": 2.2045, "step": 1221 },
+    { "epoch": 0.20680318158740904, "grad_norm": 4.306844234466553, "learning_rate": 4.786664050562372e-06, "loss": 2.3499, "step": 1222 },
+    { "epoch": 0.20697241496023017, "grad_norm": 5.108699798583984, "learning_rate": 4.715523748543027e-06, "loss": 3.078, "step": 1223 },
+    { "epoch": 0.20714164833305126, "grad_norm": 4.530353546142578, "learning_rate": 4.644903295424951e-06, "loss": 2.3786, "step": 1224 },
+    { "epoch": 0.2073108817058724, "grad_norm": 4.354692459106445, "learning_rate": 4.574803076496148e-06, "loss": 2.6458, "step": 1225 },
+    { "epoch": 0.20748011507869352, "grad_norm": 3.7937848567962646, "learning_rate": 4.50522347420631e-06, "loss": 2.5696, "step": 1226 },
+    { "epoch": 0.20764934845151464, "grad_norm": 4.121518135070801, "learning_rate": 4.4361648681648275e-06, "loss": 2.8103, "step": 1227 },
+    { "epoch": 0.20781858182433577, "grad_norm": 4.5979132652282715, "learning_rate": 4.367627635138649e-06, "loss": 2.6451, "step": 1228 },
+    { "epoch": 0.20798781519715687, "grad_norm": 4.663356781005859, "learning_rate": 4.299612149050214e-06, "loss": 2.6051, "step": 1229 },
+    { "epoch": 0.208157048569978, "grad_norm": 4.623002529144287, "learning_rate": 4.232118780975447e-06, "loss": 2.5598, "step": 1230 },
+    { "epoch": 0.20832628194279912, "grad_norm": 4.842830181121826, "learning_rate": 4.165147899141764e-06, "loss": 2.7728, "step": 1231 },
+    { "epoch": 0.20849551531562024, "grad_norm": 4.952005386352539, "learning_rate": 4.098699868925937e-06, "loss": 2.367, "step": 1232 },
+    { "epoch": 0.20866474868844137, "grad_norm": 3.993809938430786, "learning_rate": 4.032775052852278e-06, "loss": 2.3294, "step": 1233 },
+    { "epoch": 0.20883398206126247, "grad_norm": 3.9403116703033447, "learning_rate": 3.967373810590524e-06, "loss": 2.1608, "step": 1234 },
+    { "epoch": 0.2090032154340836, "grad_norm": 3.8375799655914307, "learning_rate": 3.902496498953923e-06, "loss": 2.4127, "step": 1235 },
+    { "epoch": 0.20917244880690472, "grad_norm": 3.6065914630889893, "learning_rate": 3.838143471897304e-06, "loss": 2.3595, "step": 1236 },
+    { "epoch": 0.20934168217972585, "grad_norm": 4.806676387786865, "learning_rate": 3.7743150805150897e-06, "loss": 2.6786, "step": 1237 },
+    { "epoch": 0.20951091555254697, "grad_norm": 4.4231343269348145, "learning_rate": 3.7110116730394552e-06, "loss": 2.6157, "step": 1238 },
+    { "epoch": 0.20968014892536807, "grad_norm": 3.851938247680664, "learning_rate": 3.6482335948383684e-06, "loss": 2.6559, "step": 1239 },
+    { "epoch": 0.2098493822981892, "grad_norm": 2.9522087574005127, "learning_rate": 3.585981188413767e-06, "loss": 2.2154, "step": 1240 },
+    { "epoch": 0.21001861567101032, "grad_norm": 4.296061038970947, "learning_rate": 3.524254793399584e-06, "loss": 2.36, "step": 1241 },
+    { "epoch": 0.21018784904383145, "grad_norm": 3.9503211975097656, "learning_rate": 3.4630547465600128e-06, "loss": 2.3364, "step": 1242 },
+    { "epoch": 0.21035708241665257, "grad_norm": 4.650412082672119, "learning_rate": 3.402381381787623e-06, "loss": 2.4274, "step": 1243 },
+    { "epoch": 0.21052631578947367, "grad_norm": 5.22102165222168, "learning_rate": 3.3422350301014925e-06, "loss": 2.4891, "step": 1244 },
+    { "epoch": 0.2106955491622948,
     }
   ],
   "logging_steps": 1,
@@ -7172,12 +9538,12 @@
         "should_evaluate": false,
         "should_log": false,
         "should_save": true,
-        "should_training_stop":
       },
       "attributes": {}
     }
   },
-  "total_flos":
   "train_batch_size": 2,
   "trial_name": null,
   "trial_params": null
|
8753 |
+
"grad_norm": 4.8145341873168945,
|
8754 |
+
"learning_rate": 3.2826160196455123e-06,
|
8755 |
+
"loss": 2.3191,
|
8756 |
+
"step": 1245
|
8757 |
+
},
|
8758 |
+
{
|
8759 |
+
"epoch": 0.21086478253511592,
|
8760 |
+
"grad_norm": 5.002470016479492,
|
8761 |
+
"learning_rate": 3.22352467568644e-06,
|
8762 |
+
"loss": 2.4811,
|
8763 |
+
"step": 1246
|
8764 |
+
},
|
8765 |
+
{
|
8766 |
+
"epoch": 0.21103401590793705,
|
8767 |
+
"grad_norm": 4.70563268661499,
|
8768 |
+
"learning_rate": 3.164961320612281e-06,
|
8769 |
+
"loss": 2.8949,
|
8770 |
+
"step": 1247
|
8771 |
+
},
|
8772 |
+
{
|
8773 |
+
"epoch": 0.21120324928075818,
|
8774 |
+
"grad_norm": 4.246470928192139,
|
8775 |
+
"learning_rate": 3.106926273930455e-06,
|
8776 |
+
"loss": 2.5197,
|
8777 |
+
"step": 1248
|
8778 |
+
},
|
8779 |
+
{
|
8780 |
+
"epoch": 0.21137248265357927,
|
8781 |
+
"grad_norm": 4.903048515319824,
|
8782 |
+
"learning_rate": 3.049419852266022e-06,
|
8783 |
+
"loss": 3.1415,
|
8784 |
+
"step": 1249
|
8785 |
+
},
|
8786 |
+
{
|
8787 |
+
"epoch": 0.2115417160264004,
|
8788 |
+
"grad_norm": 4.4931206703186035,
|
8789 |
+
"learning_rate": 2.9924423693600155e-06,
|
8790 |
+
"loss": 2.4481,
|
8791 |
+
"step": 1250
|
8792 |
+
},
|
8793 |
+
{
|
8794 |
+
"epoch": 0.21171094939922153,
|
8795 |
+
"grad_norm": 4.0619797706604,
|
8796 |
+
"learning_rate": 2.9359941360677434e-06,
|
8797 |
+
"loss": 3.1532,
|
8798 |
+
"step": 1251
|
8799 |
+
},
|
8800 |
+
{
|
8801 |
+
"epoch": 0.21188018277204265,
|
8802 |
+
"grad_norm": 4.9033403396606445,
|
8803 |
+
"learning_rate": 2.880075460356979e-06,
|
8804 |
+
"loss": 3.3105,
|
8805 |
+
"step": 1252
|
8806 |
+
},
|
8807 |
+
{
|
8808 |
+
"epoch": 0.21204941614486378,
|
8809 |
+
"grad_norm": 3.9951674938201904,
|
8810 |
+
"learning_rate": 2.824686647306407e-06,
|
8811 |
+
"loss": 2.5144,
|
8812 |
+
"step": 1253
|
8813 |
+
},
|
8814 |
+
{
|
8815 |
+
"epoch": 0.21221864951768488,
|
8816 |
+
"grad_norm": 4.363746643066406,
|
8817 |
+
"learning_rate": 2.7698279991039354e-06,
|
8818 |
+
"loss": 2.8359,
|
8819 |
+
"step": 1254
|
8820 |
+
},
|
8821 |
+
{
|
8822 |
+
"epoch": 0.212387882890506,
|
8823 |
+
"grad_norm": 4.142307758331299,
|
8824 |
+
"learning_rate": 2.715499815044964e-06,
|
8825 |
+
"loss": 2.4757,
|
8826 |
+
"step": 1255
|
8827 |
+
},
|
8828 |
+
{
|
8829 |
+
"epoch": 0.21255711626332713,
|
8830 |
+
"grad_norm": 4.6256818771362305,
|
8831 |
+
"learning_rate": 2.6617023915308404e-06,
|
8832 |
+
"loss": 2.2541,
|
8833 |
+
"step": 1256
|
8834 |
+
},
|
8835 |
+
{
|
8836 |
+
"epoch": 0.21272634963614825,
|
8837 |
+
"grad_norm": 4.097128868103027,
|
8838 |
+
"learning_rate": 2.608436022067218e-06,
|
8839 |
+
"loss": 2.6685,
|
8840 |
+
"step": 1257
|
8841 |
+
},
|
8842 |
+
{
|
8843 |
+
"epoch": 0.21289558300896938,
|
8844 |
+
"grad_norm": 3.558000326156616,
|
8845 |
+
"learning_rate": 2.5557009972624333e-06,
|
8846 |
+
"loss": 2.4689,
|
8847 |
+
"step": 1258
|
8848 |
+
},
|
8849 |
+
{
|
8850 |
+
"epoch": 0.21306481638179048,
|
8851 |
+
"grad_norm": 4.669186115264893,
|
8852 |
+
"learning_rate": 2.503497604825933e-06,
|
8853 |
+
"loss": 2.5567,
|
8854 |
+
"step": 1259
|
8855 |
+
},
|
8856 |
+
{
|
8857 |
+
"epoch": 0.2132340497546116,
|
8858 |
+
"grad_norm": 3.9236512184143066,
|
8859 |
+
"learning_rate": 2.4518261295667254e-06,
|
8860 |
+
"loss": 3.1424,
|
8861 |
+
"step": 1260
|
8862 |
+
},
|
8863 |
+
{
|
8864 |
+
"epoch": 0.21340328312743273,
|
8865 |
+
"grad_norm": 4.533042907714844,
|
8866 |
+
"learning_rate": 2.4006868533917983e-06,
|
8867 |
+
"loss": 2.7145,
|
8868 |
+
"step": 1261
|
8869 |
+
},
|
8870 |
+
{
|
8871 |
+
"epoch": 0.21357251650025386,
|
8872 |
+
"grad_norm": 3.7172744274139404,
|
8873 |
+
"learning_rate": 2.350080055304571e-06,
|
8874 |
+
"loss": 2.2762,
|
8875 |
+
"step": 1262
|
8876 |
+
},
|
8877 |
+
{
|
8878 |
+
"epoch": 0.21374174987307498,
|
8879 |
+
"grad_norm": 4.892393589019775,
|
8880 |
+
"learning_rate": 2.300006011403455e-06,
|
8881 |
+
"loss": 3.033,
|
8882 |
+
"step": 1263
|
8883 |
+
},
|
8884 |
+
{
|
8885 |
+
"epoch": 0.21391098324589608,
|
8886 |
+
"grad_norm": 3.32370662689209,
|
8887 |
+
"learning_rate": 2.250464994880186e-06,
|
8888 |
+
"loss": 1.7894,
|
8889 |
+
"step": 1264
|
8890 |
+
},
|
8891 |
+
{
|
8892 |
+
"epoch": 0.2140802166187172,
|
8893 |
+
"grad_norm": 3.335549831390381,
|
8894 |
+
"learning_rate": 2.201457276018526e-06,
|
8895 |
+
"loss": 2.3197,
|
8896 |
+
"step": 1265
|
8897 |
+
},
|
8898 |
+
{
|
8899 |
+
"epoch": 0.21424944999153833,
|
8900 |
+
"grad_norm": 4.068175792694092,
|
8901 |
+
"learning_rate": 2.1529831221926423e-06,
|
8902 |
+
"loss": 2.8426,
|
8903 |
+
"step": 1266
|
8904 |
+
},
|
8905 |
+
{
|
8906 |
+
"epoch": 0.21441868336435946,
|
8907 |
+
"grad_norm": 4.415652275085449,
|
8908 |
+
"learning_rate": 2.1050427978657307e-06,
|
8909 |
+
"loss": 2.8797,
|
8910 |
+
"step": 1267
|
8911 |
+
},
|
8912 |
+
{
|
8913 |
+
"epoch": 0.21458791673718058,
|
8914 |
+
"grad_norm": 3.9378819465637207,
|
8915 |
+
"learning_rate": 2.0576365645884944e-06,
|
8916 |
+
"loss": 2.3433,
|
8917 |
+
"step": 1268
|
8918 |
+
},
|
8919 |
+
{
|
8920 |
+
"epoch": 0.21475715011000168,
|
8921 |
+
"grad_norm": 4.477380275726318,
|
8922 |
+
"learning_rate": 2.010764680997823e-06,
|
8923 |
+
"loss": 2.2881,
|
8924 |
+
"step": 1269
|
8925 |
+
},
|
8926 |
+
{
|
8927 |
+
"epoch": 0.2149263834828228,
|
8928 |
+
"grad_norm": 4.02888822555542,
|
8929 |
+
"learning_rate": 1.964427402815294e-06,
|
8930 |
+
"loss": 2.5909,
|
8931 |
+
"step": 1270
|
8932 |
+
},
|
8933 |
+
{
|
8934 |
+
"epoch": 0.21509561685564393,
|
8935 |
+
"grad_norm": 3.857497215270996,
|
8936 |
+
"learning_rate": 1.9186249828458292e-06,
|
8937 |
+
"loss": 2.5508,
|
8938 |
+
"step": 1271
|
8939 |
+
},
|
8940 |
+
{
|
8941 |
+
"epoch": 0.21526485022846506,
|
8942 |
+
"grad_norm": 3.4592599868774414,
|
8943 |
+
"learning_rate": 1.8733576709762613e-06,
|
8944 |
+
"loss": 1.7915,
|
8945 |
+
"step": 1272
|
8946 |
+
},
|
8947 |
+
{
|
8948 |
+
"epoch": 0.21543408360128619,
|
8949 |
+
"grad_norm": 4.474016189575195,
|
8950 |
+
"learning_rate": 1.828625714174015e-06,
|
8951 |
+
"loss": 2.3372,
|
8952 |
+
"step": 1273
|
8953 |
+
},
|
8954 |
+
{
|
8955 |
+
"epoch": 0.21560331697410728,
|
8956 |
+
"grad_norm": 3.6105899810791016,
|
8957 |
+
"learning_rate": 1.784429356485784e-06,
|
8958 |
+
"loss": 1.9123,
|
8959 |
+
"step": 1274
|
8960 |
+
},
|
8961 |
+
{
|
8962 |
+
"epoch": 0.2157725503469284,
|
8963 |
+
"grad_norm": 3.641062021255493,
|
8964 |
+
"learning_rate": 1.740768839036111e-06,
|
8965 |
+
"loss": 2.2055,
|
8966 |
+
"step": 1275
|
8967 |
+
},
|
8968 |
+
{
|
8969 |
+
"epoch": 0.21594178371974954,
|
8970 |
+
"grad_norm": 4.652203559875488,
|
8971 |
+
"learning_rate": 1.6976444000261439e-06,
|
8972 |
+
"loss": 2.4847,
|
8973 |
+
"step": 1276
|
8974 |
+
},
|
8975 |
+
{
|
8976 |
+
"epoch": 0.21611101709257066,
|
8977 |
+
"grad_norm": 3.616909980773926,
|
8978 |
+
"learning_rate": 1.6550562747323473e-06,
|
8979 |
+
"loss": 2.2506,
|
8980 |
+
"step": 1277
|
8981 |
+
},
|
8982 |
+
{
|
8983 |
+
"epoch": 0.2162802504653918,
|
8984 |
+
"grad_norm": 3.7411768436431885,
|
8985 |
+
"learning_rate": 1.613004695505138e-06,
|
8986 |
+
"loss": 2.4802,
|
8987 |
+
"step": 1278
|
8988 |
+
},
|
8989 |
+
{
|
8990 |
+
"epoch": 0.21644948383821289,
|
8991 |
+
"grad_norm": 4.339770317077637,
|
8992 |
+
"learning_rate": 1.5714898917676969e-06,
|
8993 |
+
"loss": 2.437,
|
8994 |
+
"step": 1279
|
8995 |
+
},
|
8996 |
+
{
|
8997 |
+
"epoch": 0.216618717211034,
|
8998 |
+
"grad_norm": 3.193618059158325,
|
8999 |
+
"learning_rate": 1.5305120900146908e-06,
|
9000 |
+
"loss": 2.1883,
|
9001 |
+
"step": 1280
|
9002 |
+
},
|
9003 |
+
{
|
9004 |
+
"epoch": 0.21678795058385514,
|
9005 |
+
"grad_norm": 5.118531703948975,
|
9006 |
+
"learning_rate": 1.4900715138110311e-06,
|
9007 |
+
"loss": 2.5204,
|
9008 |
+
"step": 1281
|
9009 |
+
},
|
9010 |
+
{
|
9011 |
+
"epoch": 0.21695718395667626,
|
9012 |
+
"grad_norm": 4.23779821395874,
|
9013 |
+
"learning_rate": 1.450168383790651e-06,
|
9014 |
+
"loss": 2.7185,
|
9015 |
+
"step": 1282
|
9016 |
+
},
|
9017 |
+
{
|
9018 |
+
"epoch": 0.2171264173294974,
|
9019 |
+
"grad_norm": 3.5734786987304688,
|
9020 |
+
"learning_rate": 1.4108029176553073e-06,
|
9021 |
+
"loss": 2.7764,
|
9022 |
+
"step": 1283
|
9023 |
+
},
|
9024 |
+
{
|
9025 |
+
"epoch": 0.2172956507023185,
|
9026 |
+
"grad_norm": 3.71058988571167,
|
9027 |
+
"learning_rate": 1.371975330173403e-06,
|
9028 |
+
"loss": 2.218,
|
9029 |
+
"step": 1284
|
9030 |
+
},
|
9031 |
+
{
|
9032 |
+
"epoch": 0.21746488407513961,
|
9033 |
+
"grad_norm": 3.7329723834991455,
|
9034 |
+
"learning_rate": 1.3336858331787993e-06,
|
9035 |
+
"loss": 2.5567,
|
9036 |
+
"step": 1285
|
9037 |
+
},
|
9038 |
+
{
|
9039 |
+
"epoch": 0.21763411744796074,
|
9040 |
+
"grad_norm": 3.9438014030456543,
|
9041 |
+
"learning_rate": 1.2959346355696844e-06,
|
9042 |
+
"loss": 2.0118,
|
9043 |
+
"step": 1286
|
9044 |
+
},
|
9045 |
+
{
|
9046 |
+
"epoch": 0.21780335082078187,
|
9047 |
+
"grad_norm": 4.415074825286865,
|
9048 |
+
"learning_rate": 1.2587219433073616e-06,
|
9049 |
+
"loss": 2.4167,
|
9050 |
+
"step": 1287
|
9051 |
+
},
|
9052 |
+
{
|
9053 |
+
"epoch": 0.217972584193603,
|
9054 |
+
"grad_norm": 4.447765350341797,
|
9055 |
+
"learning_rate": 1.2220479594152513e-06,
|
9056 |
+
"loss": 2.1415,
|
9057 |
+
"step": 1288
|
9058 |
+
},
|
9059 |
+
{
|
9060 |
+
"epoch": 0.2181418175664241,
|
9061 |
+
"grad_norm": 4.770878791809082,
|
9062 |
+
"learning_rate": 1.1859128839776801e-06,
|
9063 |
+
"loss": 2.7997,
|
9064 |
+
"step": 1289
|
9065 |
+
},
|
9066 |
+
{
|
9067 |
+
"epoch": 0.21831105093924522,
|
9068 |
+
"grad_norm": 4.185766696929932,
|
9069 |
+
"learning_rate": 1.1503169141388047e-06,
|
9070 |
+
"loss": 3.0387,
|
9071 |
+
"step": 1290
|
9072 |
+
},
|
9073 |
+
{
|
9074 |
+
"epoch": 0.21848028431206634,
|
9075 |
+
"grad_norm": 3.833902359008789,
|
9076 |
+
"learning_rate": 1.115260244101579e-06,
|
9077 |
+
"loss": 1.982,
|
9078 |
+
"step": 1291
|
9079 |
+
},
|
9080 |
+
{
|
9081 |
+
"epoch": 0.21864951768488747,
|
9082 |
+
"grad_norm": 3.850200653076172,
|
9083 |
+
"learning_rate": 1.0807430651266436e-06,
|
9084 |
+
"loss": 2.169,
|
9085 |
+
"step": 1292
|
9086 |
+
},
|
9087 |
+
{
|
9088 |
+
"epoch": 0.2188187510577086,
|
9089 |
+
"grad_norm": 4.9232587814331055,
|
9090 |
+
"learning_rate": 1.046765565531349e-06,
|
9091 |
+
"loss": 2.2588,
|
9092 |
+
"step": 1293
|
9093 |
+
},
|
9094 |
+
{
|
9095 |
+
"epoch": 0.2189879844305297,
|
9096 |
+
"grad_norm": 3.386655569076538,
|
9097 |
+
"learning_rate": 1.0133279306886346e-06,
|
9098 |
+
"loss": 1.8471,
|
9099 |
+
"step": 1294
|
9100 |
+
},
|
9101 |
+
{
|
9102 |
+
"epoch": 0.21915721780335082,
|
9103 |
+
"grad_norm": 3.79015851020813,
|
9104 |
+
"learning_rate": 9.804303430261174e-07,
|
9105 |
+
"loss": 2.2161,
|
9106 |
+
"step": 1295
|
9107 |
+
},
|
9108 |
+
{
|
9109 |
+
"epoch": 0.21932645117617194,
|
9110 |
+
"grad_norm": 5.91008996963501,
|
9111 |
+
"learning_rate": 9.480729820249945e-07,
|
9112 |
+
"loss": 2.61,
|
9113 |
+
"step": 1296
|
9114 |
+
},
|
9115 |
+
{
|
9116 |
+
"epoch": 0.21949568454899307,
|
9117 |
+
"grad_norm": 4.494709014892578,
|
9118 |
+
"learning_rate": 9.162560242191865e-07,
|
9119 |
+
"loss": 3.1942,
|
9120 |
+
"step": 1297
|
9121 |
+
},
|
9122 |
+
{
|
9123 |
+
"epoch": 0.2196649179218142,
|
9124 |
+
"grad_norm": 3.7651846408843994,
|
9125 |
+
"learning_rate": 8.849796431942392e-07,
|
9126 |
+
"loss": 2.6106,
|
9127 |
+
"step": 1298
|
9128 |
+
},
|
9129 |
+
{
|
9130 |
+
"epoch": 0.2198341512946353,
|
9131 |
+
"grad_norm": 4.493137836456299,
|
9132 |
+
"learning_rate": 8.542440095864801e-07,
|
9133 |
+
"loss": 2.2617,
|
9134 |
+
"step": 1299
|
9135 |
+
},
|
9136 |
+
{
|
9137 |
+
"epoch": 0.22000338466745642,
|
9138 |
+
"grad_norm": 4.114045143127441,
|
9139 |
+
"learning_rate": 8.240492910820408e-07,
|
9140 |
+
"loss": 2.2999,
|
9141 |
+
"step": 1300
|
9142 |
+
},
|
9143 |
+
{
|
9144 |
+
"epoch": 0.22017261804027755,
|
9145 |
+
"grad_norm": 6.44645357131958,
|
9146 |
+
"learning_rate": 7.943956524159579e-07,
|
9147 |
+
"loss": 2.3555,
|
9148 |
+
"step": 1301
|
9149 |
+
},
|
9150 |
+
{
|
9151 |
+
"epoch": 0.22034185141309867,
|
9152 |
+
"grad_norm": 4.185930252075195,
|
9153 |
+
"learning_rate": 7.65283255371263e-07,
|
9154 |
+
"loss": 2.302,
|
9155 |
+
"step": 1302
|
9156 |
+
},
|
9157 |
+
{
|
9158 |
+
"epoch": 0.22051108478591977,
|
9159 |
+
"grad_norm": 4.319748878479004,
|
9160 |
+
"learning_rate": 7.36712258778105e-07,
|
9161 |
+
"loss": 2.7715,
|
9162 |
+
"step": 1303
|
9163 |
+
},
|
9164 |
+
{
|
9165 |
+
"epoch": 0.2206803181587409,
|
9166 |
+
"grad_norm": 5.204176902770996,
|
9167 |
+
"learning_rate": 7.086828185128846e-07,
|
9168 |
+
"loss": 2.8187,
|
9169 |
+
"step": 1304
|
9170 |
+
},
|
9171 |
+
{
|
9172 |
+
"epoch": 0.22084955153156202,
|
9173 |
+
"grad_norm": 4.408200740814209,
|
9174 |
+
"learning_rate": 6.811950874973993e-07,
|
9175 |
+
"loss": 2.6998,
|
9176 |
+
"step": 1305
|
9177 |
+
},
|
9178 |
+
{
|
9179 |
+
"epoch": 0.22101878490438315,
|
9180 |
+
"grad_norm": 3.811539888381958,
|
9181 |
+
"learning_rate": 6.542492156980107e-07,
|
9182 |
+
"loss": 2.3083,
|
9183 |
+
"step": 1306
|
9184 |
+
},
|
9185 |
+
{
|
9186 |
+
"epoch": 0.22118801827720427,
|
9187 |
+
"grad_norm": 3.8305625915527344,
|
9188 |
+
"learning_rate": 6.27845350124845e-07,
|
9189 |
+
"loss": 2.1068,
|
9190 |
+
"step": 1307
|
9191 |
+
},
|
9192 |
+
{
|
9193 |
+
"epoch": 0.22135725165002537,
|
9194 |
+
"grad_norm": 3.9882454872131348,
|
9195 |
+
"learning_rate": 6.019836348309716e-07,
|
9196 |
+
"loss": 2.0347,
|
9197 |
+
"step": 1308
|
9198 |
+
},
|
9199 |
+
{
|
9200 |
+
"epoch": 0.2215264850228465,
|
9201 |
+
"grad_norm": 4.82699728012085,
|
9202 |
+
"learning_rate": 5.766642109115927e-07,
|
9203 |
+
"loss": 2.902,
|
9204 |
+
"step": 1309
|
9205 |
+
},
|
9206 |
+
{
|
9207 |
+
"epoch": 0.22169571839566762,
|
9208 |
+
"grad_norm": 3.833846092224121,
|
9209 |
+
"learning_rate": 5.518872165033329e-07,
|
9210 |
+
"loss": 2.4246,
|
9211 |
+
"step": 1310
|
9212 |
+
},
|
9213 |
+
{
|
9214 |
+
"epoch": 0.22186495176848875,
|
9215 |
+
"grad_norm": 4.534404277801514,
|
9216 |
+
"learning_rate": 5.27652786783428e-07,
|
9217 |
+
"loss": 2.7814,
|
9218 |
+
"step": 1311
|
9219 |
+
},
|
9220 |
+
{
|
9221 |
+
"epoch": 0.22203418514130988,
|
9222 |
+
"grad_norm": 4.554502010345459,
|
9223 |
+
"learning_rate": 5.039610539690376e-07,
|
9224 |
+
"loss": 2.9594,
|
9225 |
+
"step": 1312
|
9226 |
+
},
|
9227 |
+
{
|
9228 |
+
"epoch": 0.22220341851413097,
|
9229 |
+
"grad_norm": 4.620689868927002,
|
9230 |
+
"learning_rate": 4.808121473164894e-07,
|
9231 |
+
"loss": 2.7049,
|
9232 |
+
"step": 1313
|
9233 |
+
},
|
9234 |
+
{
|
9235 |
+
"epoch": 0.2223726518869521,
|
9236 |
+
"grad_norm": 4.360732078552246,
|
9237 |
+
"learning_rate": 4.5820619312058057e-07,
|
9238 |
+
"loss": 2.7786,
|
9239 |
+
"step": 1314
|
9240 |
+
},
|
9241 |
+
{
|
9242 |
+
"epoch": 0.22254188525977323,
|
9243 |
+
"grad_norm": 4.134099960327148,
|
9244 |
+
"learning_rate": 4.3614331471387714e-07,
|
9245 |
+
"loss": 2.3839,
|
9246 |
+
"step": 1315
|
9247 |
+
},
|
9248 |
+
{
|
9249 |
+
"epoch": 0.22271111863259435,
|
9250 |
+
"grad_norm": 5.135616779327393,
|
9251 |
+
"learning_rate": 4.146236324660824e-07,
|
9252 |
+
"loss": 2.7121,
|
9253 |
+
"step": 1316
|
9254 |
+
},
|
9255 |
+
{
|
9256 |
+
"epoch": 0.22288035200541548,
|
9257 |
+
"grad_norm": 3.924476146697998,
|
9258 |
+
"learning_rate": 3.936472637833477e-07,
|
9259 |
+
"loss": 2.7642,
|
9260 |
+
"step": 1317
|
9261 |
+
},
|
9262 |
+
{
|
9263 |
+
"epoch": 0.22304958537823658,
|
9264 |
+
"grad_norm": 4.798012733459473,
|
9265 |
+
"learning_rate": 3.732143231076179e-07,
|
9266 |
+
"loss": 3.1103,
|
9267 |
+
"step": 1318
|
9268 |
+
},
|
9269 |
+
{
|
9270 |
+
"epoch": 0.2232188187510577,
|
9271 |
+
"grad_norm": 3.97001576423645,
|
9272 |
+
"learning_rate": 3.53324921916065e-07,
|
9273 |
+
"loss": 2.5676,
|
9274 |
+
"step": 1319
|
9275 |
+
},
|
9276 |
+
{
|
9277 |
+
"epoch": 0.22338805212387883,
|
9278 |
+
"grad_norm": 5.0922160148620605,
|
9279 |
+
"learning_rate": 3.339791687203997e-07,
|
9280 |
+
"loss": 2.3982,
|
9281 |
+
"step": 1320
|
9282 |
+
},
|
9283 |
+
{
|
9284 |
+
"epoch": 0.22355728549669995,
|
9285 |
+
"grad_norm": 4.507490634918213,
|
9286 |
+
"learning_rate": 3.151771690663496e-07,
|
9287 |
+
"loss": 2.706,
|
9288 |
+
"step": 1321
|
9289 |
+
},
|
9290 |
+
{
|
9291 |
+
"epoch": 0.22372651886952108,
|
9292 |
+
"grad_norm": 3.820155143737793,
|
9293 |
+
"learning_rate": 2.9691902553303783e-07,
|
9294 |
+
"loss": 2.3196,
|
9295 |
+
"step": 1322
|
9296 |
+
},
|
9297 |
+
{
|
9298 |
+
"epoch": 0.22389575224234218,
|
9299 |
+
"grad_norm": 4.559001445770264,
|
9300 |
+
"learning_rate": 2.7920483773246076e-07,
|
9301 |
+
"loss": 2.1678,
|
9302 |
+
"step": 1323
|
9303 |
+
},
|
9304 |
+
{
|
9305 |
+
"epoch": 0.2240649856151633,
|
9306 |
+
"grad_norm": 4.293283939361572,
|
9307 |
+
"learning_rate": 2.6203470230891094e-07,
|
9308 |
+
"loss": 2.6473,
|
9309 |
+
"step": 1324
|
9310 |
+
},
|
9311 |
+
{
|
9312 |
+
"epoch": 0.22423421898798443,
|
9313 |
+
"grad_norm": 4.640307903289795,
|
9314 |
+
"learning_rate": 2.4540871293845524e-07,
|
9315 |
+
"loss": 2.2306,
|
9316 |
+
"step": 1325
|
9317 |
+
},
|
9318 |
+
{
|
9319 |
+
"epoch": 0.22440345236080556,
|
9320 |
+
"grad_norm": 4.0326457023620605,
|
9321 |
+
"learning_rate": 2.2932696032846867e-07,
|
9322 |
+
"loss": 2.3978,
|
9323 |
+
"step": 1326
|
9324 |
+
},
|
9325 |
+
{
|
9326 |
+
"epoch": 0.22457268573362668,
|
9327 |
+
"grad_norm": 4.09109354019165,
|
9328 |
+
"learning_rate": 2.137895322170458e-07,
|
9329 |
+
"loss": 2.2967,
|
9330 |
+
"step": 1327
|
9331 |
+
},
|
9332 |
+
{
|
9333 |
+
"epoch": 0.22474191910644778,
|
9334 |
+
"grad_norm": 4.141595363616943,
|
9335 |
+
"learning_rate": 1.987965133726344e-07,
|
9336 |
+
"loss": 2.9463,
|
9337 |
+
"step": 1328
|
9338 |
+
},
|
9339 |
+
{
|
9340 |
+
"epoch": 0.2249111524792689,
|
9341 |
+
"grad_norm": 4.454281806945801,
|
9342 |
+
"learning_rate": 1.8434798559349153e-07,
|
9343 |
+
"loss": 2.69,
|
9344 |
+
"step": 1329
|
9345 |
+
},
|
9346 |
+
{
|
9347 |
+
"epoch": 0.22508038585209003,
|
9348 |
+
"grad_norm": 4.567571640014648,
|
9349 |
+
"learning_rate": 1.7044402770725055e-07,
|
9350 |
+
"loss": 2.8975,
|
9351 |
+
"step": 1330
|
9352 |
+
},
|
9353 |
+
{
|
9354 |
+
"epoch": 0.22524961922491116,
|
9355 |
+
"grad_norm": 3.8534817695617676,
|
9356 |
+
"learning_rate": 1.570847155705435e-07,
|
9357 |
+
"loss": 2.1732,
|
9358 |
+
"step": 1331
|
9359 |
+
},
|
9360 |
+
{
|
9361 |
+
"epoch": 0.22541885259773228,
|
9362 |
+
"grad_norm": 3.491701364517212,
|
9363 |
+
"learning_rate": 1.4427012206851276e-07,
|
9364 |
+
"loss": 2.0317,
|
9365 |
+
"step": 1332
|
9366 |
+
},
|
9367 |
+
{
|
9368 |
+
"epoch": 0.22558808597055338,
|
9369 |
+
"grad_norm": 4.23175573348999,
|
9370 |
+
"learning_rate": 1.320003171144446e-07,
|
9371 |
+
"loss": 2.8074,
|
9372 |
+
"step": 1333
|
9373 |
+
},
|
9374 |
+
{
|
9375 |
+
"epoch": 0.2257573193433745,
|
9376 |
+
"grad_norm": 3.740814447402954,
|
9377 |
+
"learning_rate": 1.2027536764943614e-07,
|
9378 |
+
"loss": 2.0992,
|
9379 |
+
"step": 1334
|
9380 |
+
},
|
9381 |
+
{
|
9382 |
+
"epoch": 0.22592655271619563,
|
9383 |
+
"grad_norm": 4.385926723480225,
|
9384 |
+
"learning_rate": 1.0909533764194014e-07,
|
9385 |
+
"loss": 2.9652,
|
9386 |
+
"step": 1335
|
9387 |
+
},
|
9388 |
+
{
|
9389 |
+
"epoch": 0.22609578608901676,
|
9390 |
+
"grad_norm": 4.270120143890381,
|
9391 |
+
"learning_rate": 9.846028808748742e-08,
|
9392 |
+
"loss": 2.3998,
|
9393 |
+
"step": 1336
|
9394 |
+
},
|
9395 |
+
{
|
9396 |
+
"epoch": 0.22626501946183789,
|
9397 |
+
"grad_norm": 4.28633975982666,
|
9398 |
+
"learning_rate": 8.837027700830946e-08,
|
9399 |
+
"loss": 2.5314,
|
9400 |
+
"step": 1337
|
9401 |
+
},
|
9402 |
+
{
|
9403 |
+
"epoch": 0.22643425283465898,
|
9404 |
+
"grad_norm": 5.241239070892334,
|
9405 |
+
"learning_rate": 7.882535945304969e-08,
|
9406 |
+
"loss": 2.4356,
|
9407 |
+
"step": 1338
|
9408 |
+
},
|
9409 |
+
{
|
9410 |
+
"epoch": 0.2266034862074801,
|
9411 |
+
"grad_norm": 3.720529794692993,
|
9412 |
+
"learning_rate": 6.982558749646374e-08,
|
9413 |
+
"loss": 2.3675,
|
9414 |
+
"step": 1339
|
9415 |
+
},
|
9416 |
+
{
|
9417 |
+
"epoch": 0.22677271958030124,
|
9418 |
+
"grad_norm": 5.096493721008301,
|
9419 |
+
"learning_rate": 6.137101023910852e-08,
|
9420 |
+
"loss": 2.1808,
|
9421 |
+
"step": 1340
|
9422 |
+
},
|
9423 |
+
{
|
9424 |
+
"epoch": 0.22694195295312236,
|
9425 |
+
"grad_norm": 4.504518985748291,
|
9426 |
+
"learning_rate": 5.34616738070981e-08,
|
9427 |
+
"loss": 2.6772,
|
9428 |
+
"step": 1341
|
9429 |
+
},
|
9430 |
+
{
|
9431 |
+
"epoch": 0.2271111863259435,
|
9432 |
+
"grad_norm": 4.800610542297363,
|
9433 |
+
"learning_rate": 4.609762135184825e-08,
|
9434 |
+
"loss": 2.4838,
|
9435 |
+
"step": 1342
|
9436 |
+
},
|
9437 |
+
{
|
9438 |
+
"epoch": 0.22728041969876459,
|
9439 |
+
"grad_norm": 4.0853800773620605,
|
9440 |
+
"learning_rate": 3.927889304983223e-08,
|
9441 |
+
"loss": 2.1808,
|
9442 |
+
"step": 1343
|
9443 |
+
},
|
9444 |
+
{
|
9445 |
+
"epoch": 0.2274496530715857,
|
9446 |
+
"grad_norm": 5.056674957275391,
|
9447 |
+
"learning_rate": 3.3005526102380944e-08,
|
9448 |
+
"loss": 2.5508,
|
9449 |
+
"step": 1344
|
9450 |
+
},
|
9451 |
+
{
|
9452 |
+
"epoch": 0.22761888644440684,
|
9453 |
+
"grad_norm": 4.098772048950195,
|
9454 |
+
"learning_rate": 2.7277554735449794e-08,
|
9455 |
+
"loss": 2.2374,
|
9456 |
+
"step": 1345
|
9457 |
+
},
|
9458 |
+
{
|
9459 |
+
"epoch": 0.22778811981722796,
|
9460 |
+
"grad_norm": 3.807137966156006,
|
9461 |
+
"learning_rate": 2.209501019945215e-08,
|
9462 |
+
"loss": 2.5691,
|
9463 |
+
"step": 1346
|
9464 |
+
},
|
9465 |
+
{
|
9466 |
+
"epoch": 0.2279573531900491,
|
9467 |
+
"grad_norm": 4.1258955001831055,
|
9468 |
+
"learning_rate": 1.745792076910391e-08,
|
9469 |
+
"loss": 2.8063,
|
9470 |
+
"step": 1347
|
9471 |
+
},
|
9472 |
+
{
|
9473 |
+
"epoch": 0.2281265865628702,
|
9474 |
+
"grad_norm": 4.832547664642334,
|
9475 |
+
"learning_rate": 1.336631174322367e-08,
|
9476 |
+
"loss": 2.9441,
|
9477 |
+
"step": 1348
|
9478 |
+
},
|
9479 |
+
{
|
9480 |
+
"epoch": 0.2282958199356913,
|
9481 |
+
"grad_norm": 3.799896478652954,
|
9482 |
+
"learning_rate": 9.820205444632802e-09,
|
9483 |
+
"loss": 2.5411,
|
9484 |
+
"step": 1349
|
9485 |
+
},
|
9486 |
+
{
|
9487 |
+
"epoch": 0.22846505330851244,
|
9488 |
+
"grad_norm": 3.1178853511810303,
|
9489 |
+
"learning_rate": 6.8196212200333235e-09,
|
9490 |
+
"loss": 1.5001,
|
9491 |
+
"step": 1350
|
9492 |
+
},
|
9493 |
+
{
|
9494 |
+
"epoch": 0.22863428668133357,
|
9495 |
+
"grad_norm": 3.498769998550415,
|
9496 |
+
"learning_rate": 4.364575439874674e-09,
|
9497 |
+
"loss": 2.1578,
|
9498 |
+
"step": 1351
|
9499 |
+
},
|
9500 |
+
{
|
9501 |
+
"epoch": 0.2288035200541547,
|
9502 |
+
"grad_norm": 3.5528619289398193,
|
9503 |
+
"learning_rate": 2.455081498287104e-09,
|
9504 |
+
"loss": 2.5942,
|
9505 |
+
"step": 1352
|
9506 |
+
},
|
9507 |
+
{
|
9508 |
+
"epoch": 0.2289727534269758,
|
9509 |
+
"grad_norm": 4.122835636138916,
|
9510 |
+
"learning_rate": 1.0911498130039555e-09,
|
9511 |
+
"loss": 2.4448,
|
9512 |
+
"step": 1353
|
9513 |
+
},
|
9514 |
+
{
|
9515 |
+
"epoch": 0.22914198679979692,
|
9516 |
+
"grad_norm": 4.028839588165283,
|
9517 |
+
"learning_rate": 2.7278782531725553e-10,
|
9518 |
+
"loss": 2.7326,
|
9519 |
+
"step": 1354
|
9520 |
+
},
|
9521 |
+
{
|
9522 |
+
"epoch": 0.22931122017261804,
|
9523 |
+
"grad_norm": 3.5548853874206543,
|
9524 |
+
"learning_rate": 0.0,
|
9525 |
+
"loss": 1.9192,
|
9526 |
+
"step": 1355
|
9527 |
}
|
9528 |
],
|
9529 |
"logging_steps": 1,
|
|
|
9538 |
"should_evaluate": false,
|
9539 |
"should_log": false,
|
9540 |
"should_save": true,
|
9541 |
+
"should_training_stop": true
|
9542 |
},
|
9543 |
"attributes": {}
|
9544 |
}
|
9545 |
},
|
9546 |
+
"total_flos": 1.039607525277696e+16,
|
9547 |
"train_batch_size": 2,
|
9548 |
"trial_name": null,
|
9549 |
"trial_params": null
|