Training in progress, step 1554, checkpoint
last-checkpoint/adapter_model.safetensors
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:01998ebdb89f2e144d30d1def3fb19cb6fa389ccc80391c1d8c790fc34acea76
 size 83945296
last-checkpoint/optimizer.pt
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:3f72b6fad90555cd9929145bd606c3a57b9f1868834624aca20cf495234e9387
 size 43123028
last-checkpoint/rng_state.pth
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:fc7623509bf468a9f0ca0433ee9ced97011757ae9a725236f326864abb9ae577
 size 14244
last-checkpoint/scheduler.pt
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:f9817f8edc9cb5b39db981ecb240ea5a2cfbe7c3cb37093dba74fbe7c5aa21fa
 size 1064
last-checkpoint/trainer_state.json
CHANGED
@@ -1,9 +1,9 @@
 {
   "best_metric": null,
   "best_model_checkpoint": null,
-  "epoch": 0.
   "eval_steps": 500,
-  "global_step":
   "is_hyper_param_search": false,
   "is_local_process_zero": true,
   "is_world_process_zero": true,
@@ -8176,6 +8176,2715 @@
     "learning_rate": 1.4626739070119522e-05,
     "loss": 5.3065,
     "step": 1167
     }
   ],
   "logging_steps": 1,
@@ -8190,12 +10899,12 @@
     "should_evaluate": false,
     "should_log": false,
     "should_save": true,
-    "should_training_stop":
   },
   "attributes": {}
   }
 },
-  "total_flos":
   "train_batch_size": 4,
   "trial_name": null,
   "trial_params": null

 {
   "best_metric": null,
   "best_model_checkpoint": null,
+  "epoch": 0.0635024416157571,
   "eval_steps": 500,
+  "global_step": 1554,
   "is_hyper_param_search": false,
   "is_local_process_zero": true,
   "is_world_process_zero": true,
     "learning_rate": 1.4626739070119522e-05,
     "loss": 5.3065,
     "step": 1167
8179 |
+
},
|
8180 |
+
{
|
8181 |
+
"epoch": 0.047728990866926835,
|
8182 |
+
"grad_norm": 16.56922149658203,
|
8183 |
+
"learning_rate": 1.4555142573332076e-05,
|
8184 |
+
"loss": 6.1598,
|
8185 |
+
"step": 1168
|
8186 |
+
},
|
8187 |
+
{
|
8188 |
+
"epoch": 0.04776985472897043,
|
8189 |
+
"grad_norm": 13.160951614379883,
|
8190 |
+
"learning_rate": 1.4483691874016974e-05,
|
8191 |
+
"loss": 5.4143,
|
8192 |
+
"step": 1169
|
8193 |
+
},
|
8194 |
+
{
|
8195 |
+
"epoch": 0.04781071859101404,
|
8196 |
+
"grad_norm": 13.78029727935791,
|
8197 |
+
"learning_rate": 1.4412387266076676e-05,
|
8198 |
+
"loss": 6.8752,
|
8199 |
+
"step": 1170
|
8200 |
+
},
|
8201 |
+
{
|
8202 |
+
"epoch": 0.04785158245305764,
|
8203 |
+
"grad_norm": 13.718806266784668,
|
8204 |
+
"learning_rate": 1.4341229042812766e-05,
|
8205 |
+
"loss": 5.9538,
|
8206 |
+
"step": 1171
|
8207 |
+
},
|
8208 |
+
{
|
8209 |
+
"epoch": 0.04789244631510124,
|
8210 |
+
"grad_norm": 13.259661674499512,
|
8211 |
+
"learning_rate": 1.4270217496924626e-05,
|
8212 |
+
"loss": 5.8544,
|
8213 |
+
"step": 1172
|
8214 |
+
},
|
8215 |
+
{
|
8216 |
+
"epoch": 0.04793331017714484,
|
8217 |
+
"grad_norm": 14.463263511657715,
|
8218 |
+
"learning_rate": 1.4199352920508363e-05,
|
8219 |
+
"loss": 6.8695,
|
8220 |
+
"step": 1173
|
8221 |
+
},
|
8222 |
+
{
|
8223 |
+
"epoch": 0.047974174039188446,
|
8224 |
+
"grad_norm": 14.733413696289062,
|
8225 |
+
"learning_rate": 1.4128635605055512e-05,
|
8226 |
+
"loss": 6.1099,
|
8227 |
+
"step": 1174
|
8228 |
+
},
|
8229 |
+
{
|
8230 |
+
"epoch": 0.048015037901232044,
|
8231 |
+
"grad_norm": 14.981409072875977,
|
8232 |
+
"learning_rate": 1.4058065841451857e-05,
|
8233 |
+
"loss": 7.051,
|
8234 |
+
"step": 1175
|
8235 |
+
},
|
8236 |
+
{
|
8237 |
+
"epoch": 0.04805590176327565,
|
8238 |
+
"grad_norm": 13.149474143981934,
|
8239 |
+
"learning_rate": 1.3987643919976285e-05,
|
8240 |
+
"loss": 5.644,
|
8241 |
+
"step": 1176
|
8242 |
+
},
|
8243 |
+
{
|
8244 |
+
"epoch": 0.04809676562531925,
|
8245 |
+
"grad_norm": 13.739705085754395,
|
8246 |
+
"learning_rate": 1.3917370130299545e-05,
|
8247 |
+
"loss": 5.6014,
|
8248 |
+
"step": 1177
|
8249 |
+
},
|
8250 |
+
{
|
8251 |
+
"epoch": 0.04813762948736285,
|
8252 |
+
"grad_norm": 13.729369163513184,
|
8253 |
+
"learning_rate": 1.3847244761483052e-05,
|
8254 |
+
"loss": 5.7263,
|
8255 |
+
"step": 1178
|
8256 |
+
},
|
8257 |
+
{
|
8258 |
+
"epoch": 0.04817849334940645,
|
8259 |
+
"grad_norm": 12.26607894897461,
|
8260 |
+
"learning_rate": 1.3777268101977719e-05,
|
8261 |
+
"loss": 5.6141,
|
8262 |
+
"step": 1179
|
8263 |
+
},
|
8264 |
+
{
|
8265 |
+
"epoch": 0.048219357211450056,
|
8266 |
+
"grad_norm": 14.923595428466797,
|
8267 |
+
"learning_rate": 1.3707440439622754e-05,
|
8268 |
+
"loss": 5.7974,
|
8269 |
+
"step": 1180
|
8270 |
+
},
|
8271 |
+
{
|
8272 |
+
"epoch": 0.048260221073493655,
|
8273 |
+
"grad_norm": 15.90456771850586,
|
8274 |
+
"learning_rate": 1.36377620616445e-05,
|
8275 |
+
"loss": 6.091,
|
8276 |
+
"step": 1181
|
8277 |
+
},
|
8278 |
+
{
|
8279 |
+
"epoch": 0.04830108493553726,
|
8280 |
+
"grad_norm": 14.041563987731934,
|
8281 |
+
"learning_rate": 1.3568233254655265e-05,
|
8282 |
+
"loss": 6.4794,
|
8283 |
+
"step": 1182
|
8284 |
+
},
|
8285 |
+
{
|
8286 |
+
"epoch": 0.04834194879758086,
|
8287 |
+
"grad_norm": 12.559455871582031,
|
8288 |
+
"learning_rate": 1.3498854304652064e-05,
|
8289 |
+
"loss": 5.2867,
|
8290 |
+
"step": 1183
|
8291 |
+
},
|
8292 |
+
{
|
8293 |
+
"epoch": 0.04838281265962446,
|
8294 |
+
"grad_norm": 16.288496017456055,
|
8295 |
+
"learning_rate": 1.3429625497015514e-05,
|
8296 |
+
"loss": 7.0109,
|
8297 |
+
"step": 1184
|
8298 |
+
},
|
8299 |
+
{
|
8300 |
+
"epoch": 0.04842367652166806,
|
8301 |
+
"grad_norm": 14.173421859741211,
|
8302 |
+
"learning_rate": 1.3360547116508671e-05,
|
8303 |
+
"loss": 6.2304,
|
8304 |
+
"step": 1185
|
8305 |
+
},
|
8306 |
+
{
|
8307 |
+
"epoch": 0.04846454038371167,
|
8308 |
+
"grad_norm": 16.93602180480957,
|
8309 |
+
"learning_rate": 1.3291619447275789e-05,
|
8310 |
+
"loss": 6.3068,
|
8311 |
+
"step": 1186
|
8312 |
+
},
|
8313 |
+
{
|
8314 |
+
"epoch": 0.048505404245755265,
|
8315 |
+
"grad_norm": 19.0164794921875,
|
8316 |
+
"learning_rate": 1.3222842772841231e-05,
|
8317 |
+
"loss": 7.1744,
|
8318 |
+
"step": 1187
|
8319 |
+
},
|
8320 |
+
{
|
8321 |
+
"epoch": 0.04854626810779887,
|
8322 |
+
"grad_norm": 13.528770446777344,
|
8323 |
+
"learning_rate": 1.315421737610823e-05,
|
8324 |
+
"loss": 5.6083,
|
8325 |
+
"step": 1188
|
8326 |
+
},
|
8327 |
+
{
|
8328 |
+
"epoch": 0.04858713196984247,
|
8329 |
+
"grad_norm": 16.025054931640625,
|
8330 |
+
"learning_rate": 1.3085743539357787e-05,
|
8331 |
+
"loss": 5.9953,
|
8332 |
+
"step": 1189
|
8333 |
+
},
|
8334 |
+
{
|
8335 |
+
"epoch": 0.048627995831886074,
|
8336 |
+
"grad_norm": 14.402274131774902,
|
8337 |
+
"learning_rate": 1.3017421544247465e-05,
|
8338 |
+
"loss": 5.1612,
|
8339 |
+
"step": 1190
|
8340 |
+
},
|
8341 |
+
{
|
8342 |
+
"epoch": 0.04866885969392967,
|
8343 |
+
"grad_norm": 15.096915245056152,
|
8344 |
+
"learning_rate": 1.2949251671810236e-05,
|
8345 |
+
"loss": 5.8464,
|
8346 |
+
"step": 1191
|
8347 |
+
},
|
8348 |
+
{
|
8349 |
+
"epoch": 0.04870972355597328,
|
8350 |
+
"grad_norm": 17.112619400024414,
|
8351 |
+
"learning_rate": 1.2881234202453384e-05,
|
8352 |
+
"loss": 6.2358,
|
8353 |
+
"step": 1192
|
8354 |
+
},
|
8355 |
+
{
|
8356 |
+
"epoch": 0.048750587418016876,
|
8357 |
+
"grad_norm": 16.391468048095703,
|
8358 |
+
"learning_rate": 1.2813369415957233e-05,
|
8359 |
+
"loss": 6.6303,
|
8360 |
+
"step": 1193
|
8361 |
+
},
|
8362 |
+
{
|
8363 |
+
"epoch": 0.04879145128006048,
|
8364 |
+
"grad_norm": 17.587890625,
|
8365 |
+
"learning_rate": 1.2745657591474152e-05,
|
8366 |
+
"loss": 7.2622,
|
8367 |
+
"step": 1194
|
8368 |
+
},
|
8369 |
+
{
|
8370 |
+
"epoch": 0.04883231514210408,
|
8371 |
+
"grad_norm": 17.28040885925293,
|
8372 |
+
"learning_rate": 1.267809900752725e-05,
|
8373 |
+
"loss": 6.2785,
|
8374 |
+
"step": 1195
|
8375 |
+
},
|
8376 |
+
{
|
8377 |
+
"epoch": 0.048873179004147685,
|
8378 |
+
"grad_norm": 19.080217361450195,
|
8379 |
+
"learning_rate": 1.2610693942009317e-05,
|
8380 |
+
"loss": 6.4922,
|
8381 |
+
"step": 1196
|
8382 |
+
},
|
8383 |
+
{
|
8384 |
+
"epoch": 0.04891404286619128,
|
8385 |
+
"grad_norm": 19.673229217529297,
|
8386 |
+
"learning_rate": 1.2543442672181704e-05,
|
8387 |
+
"loss": 6.0196,
|
8388 |
+
"step": 1197
|
8389 |
+
},
|
8390 |
+
{
|
8391 |
+
"epoch": 0.04895490672823489,
|
8392 |
+
"grad_norm": 17.318593978881836,
|
8393 |
+
"learning_rate": 1.2476345474673124e-05,
|
8394 |
+
"loss": 5.1824,
|
8395 |
+
"step": 1198
|
8396 |
+
},
|
8397 |
+
{
|
8398 |
+
"epoch": 0.04899577059027849,
|
8399 |
+
"grad_norm": 18.142581939697266,
|
8400 |
+
"learning_rate": 1.2409402625478518e-05,
|
8401 |
+
"loss": 6.1025,
|
8402 |
+
"step": 1199
|
8403 |
+
},
|
8404 |
+
{
|
8405 |
+
"epoch": 0.04903663445232209,
|
8406 |
+
"grad_norm": 23.89717674255371,
|
8407 |
+
"learning_rate": 1.2342614399957952e-05,
|
8408 |
+
"loss": 8.1561,
|
8409 |
+
"step": 1200
|
8410 |
+
},
|
8411 |
+
{
|
8412 |
+
"epoch": 0.04907749831436569,
|
8413 |
+
"grad_norm": 12.024513244628906,
|
8414 |
+
"learning_rate": 1.2275981072835452e-05,
|
8415 |
+
"loss": 5.6642,
|
8416 |
+
"step": 1201
|
8417 |
+
},
|
8418 |
+
{
|
8419 |
+
"epoch": 0.049118362176409296,
|
8420 |
+
"grad_norm": 10.529586791992188,
|
8421 |
+
"learning_rate": 1.2209502918197919e-05,
|
8422 |
+
"loss": 5.0775,
|
8423 |
+
"step": 1202
|
8424 |
+
},
|
8425 |
+
{
|
8426 |
+
"epoch": 0.049159226038452894,
|
8427 |
+
"grad_norm": 11.508085250854492,
|
8428 |
+
"learning_rate": 1.2143180209493982e-05,
|
8429 |
+
"loss": 5.5091,
|
8430 |
+
"step": 1203
|
8431 |
+
},
|
8432 |
+
{
|
8433 |
+
"epoch": 0.0492000899004965,
|
8434 |
+
"grad_norm": 11.050451278686523,
|
8435 |
+
"learning_rate": 1.2077013219532817e-05,
|
8436 |
+
"loss": 5.8603,
|
8437 |
+
"step": 1204
|
8438 |
+
},
|
8439 |
+
{
|
8440 |
+
"epoch": 0.0492409537625401,
|
8441 |
+
"grad_norm": 9.736432075500488,
|
8442 |
+
"learning_rate": 1.2011002220483097e-05,
|
8443 |
+
"loss": 5.4329,
|
8444 |
+
"step": 1205
|
8445 |
+
},
|
8446 |
+
{
|
8447 |
+
"epoch": 0.0492818176245837,
|
8448 |
+
"grad_norm": 9.99765396118164,
|
8449 |
+
"learning_rate": 1.1945147483871871e-05,
|
8450 |
+
"loss": 5.1246,
|
8451 |
+
"step": 1206
|
8452 |
+
},
|
8453 |
+
{
|
8454 |
+
"epoch": 0.0493226814866273,
|
8455 |
+
"grad_norm": 12.543411254882812,
|
8456 |
+
"learning_rate": 1.1879449280583393e-05,
|
8457 |
+
"loss": 6.7258,
|
8458 |
+
"step": 1207
|
8459 |
+
},
|
8460 |
+
{
|
8461 |
+
"epoch": 0.0493635453486709,
|
8462 |
+
"grad_norm": 9.004258155822754,
|
8463 |
+
"learning_rate": 1.1813907880858032e-05,
|
8464 |
+
"loss": 4.4121,
|
8465 |
+
"step": 1208
|
8466 |
+
},
|
8467 |
+
{
|
8468 |
+
"epoch": 0.049404409210714505,
|
8469 |
+
"grad_norm": 12.22309398651123,
|
8470 |
+
"learning_rate": 1.174852355429122e-05,
|
8471 |
+
"loss": 5.5597,
|
8472 |
+
"step": 1209
|
8473 |
+
},
|
8474 |
+
{
|
8475 |
+
"epoch": 0.0494452730727581,
|
8476 |
+
"grad_norm": 15.347993850708008,
|
8477 |
+
"learning_rate": 1.168329656983222e-05,
|
8478 |
+
"loss": 5.6602,
|
8479 |
+
"step": 1210
|
8480 |
+
},
|
8481 |
+
{
|
8482 |
+
"epoch": 0.04948613693480171,
|
8483 |
+
"grad_norm": 10.857593536376953,
|
8484 |
+
"learning_rate": 1.161822719578316e-05,
|
8485 |
+
"loss": 5.6668,
|
8486 |
+
"step": 1211
|
8487 |
+
},
|
8488 |
+
{
|
8489 |
+
"epoch": 0.04952700079684531,
|
8490 |
+
"grad_norm": 12.994451522827148,
|
8491 |
+
"learning_rate": 1.1553315699797807e-05,
|
8492 |
+
"loss": 5.8107,
|
8493 |
+
"step": 1212
|
8494 |
+
},
|
8495 |
+
{
|
8496 |
+
"epoch": 0.04956786465888891,
|
8497 |
+
"grad_norm": 12.435081481933594,
|
8498 |
+
"learning_rate": 1.1488562348880528e-05,
|
8499 |
+
"loss": 6.6048,
|
8500 |
+
"step": 1213
|
8501 |
+
},
|
8502 |
+
{
|
8503 |
+
"epoch": 0.04960872852093251,
|
8504 |
+
"grad_norm": 14.909284591674805,
|
8505 |
+
"learning_rate": 1.1423967409385195e-05,
|
8506 |
+
"loss": 6.0931,
|
8507 |
+
"step": 1214
|
8508 |
+
},
|
8509 |
+
{
|
8510 |
+
"epoch": 0.049649592382976115,
|
8511 |
+
"grad_norm": 13.033126831054688,
|
8512 |
+
"learning_rate": 1.1359531147014101e-05,
|
8513 |
+
"loss": 5.9901,
|
8514 |
+
"step": 1215
|
8515 |
+
},
|
8516 |
+
{
|
8517 |
+
"epoch": 0.049690456245019714,
|
8518 |
+
"grad_norm": 12.553189277648926,
|
8519 |
+
"learning_rate": 1.1295253826816788e-05,
|
8520 |
+
"loss": 5.5477,
|
8521 |
+
"step": 1216
|
8522 |
+
},
|
8523 |
+
{
|
8524 |
+
"epoch": 0.04973132010706332,
|
8525 |
+
"grad_norm": 11.070270538330078,
|
8526 |
+
"learning_rate": 1.123113571318905e-05,
|
8527 |
+
"loss": 5.6236,
|
8528 |
+
"step": 1217
|
8529 |
+
},
|
8530 |
+
{
|
8531 |
+
"epoch": 0.04977218396910692,
|
8532 |
+
"grad_norm": 12.580347061157227,
|
8533 |
+
"learning_rate": 1.1167177069871759e-05,
|
8534 |
+
"loss": 5.4246,
|
8535 |
+
"step": 1218
|
8536 |
+
},
|
8537 |
+
{
|
8538 |
+
"epoch": 0.04981304783115052,
|
8539 |
+
"grad_norm": 10.129332542419434,
|
8540 |
+
"learning_rate": 1.1103378159949923e-05,
|
8541 |
+
"loss": 4.8745,
|
8542 |
+
"step": 1219
|
8543 |
+
},
|
8544 |
+
{
|
8545 |
+
"epoch": 0.04985391169319412,
|
8546 |
+
"grad_norm": 12.94534683227539,
|
8547 |
+
"learning_rate": 1.1039739245851427e-05,
|
8548 |
+
"loss": 5.3851,
|
8549 |
+
"step": 1220
|
8550 |
+
},
|
8551 |
+
{
|
8552 |
+
"epoch": 0.049894775555237726,
|
8553 |
+
"grad_norm": 12.84284496307373,
|
8554 |
+
"learning_rate": 1.0976260589346054e-05,
|
8555 |
+
"loss": 5.7643,
|
8556 |
+
"step": 1221
|
8557 |
+
},
|
8558 |
+
{
|
8559 |
+
"epoch": 0.049935639417281324,
|
8560 |
+
"grad_norm": 12.04676628112793,
|
8561 |
+
"learning_rate": 1.0912942451544377e-05,
|
8562 |
+
"loss": 6.1616,
|
8563 |
+
"step": 1222
|
8564 |
+
},
|
8565 |
+
{
|
8566 |
+
"epoch": 0.04997650327932493,
|
8567 |
+
"grad_norm": 14.473459243774414,
|
8568 |
+
"learning_rate": 1.0849785092896758e-05,
|
8569 |
+
"loss": 7.3831,
|
8570 |
+
"step": 1223
|
8571 |
+
},
|
8572 |
+
{
|
8573 |
+
"epoch": 0.05001736714136853,
|
8574 |
+
"grad_norm": 12.467133522033691,
|
8575 |
+
"learning_rate": 1.078678877319213e-05,
|
8576 |
+
"loss": 5.6528,
|
8577 |
+
"step": 1224
|
8578 |
+
},
|
8579 |
+
{
|
8580 |
+
"epoch": 0.05005823100341213,
|
8581 |
+
"grad_norm": 13.110004425048828,
|
8582 |
+
"learning_rate": 1.0723953751557097e-05,
|
8583 |
+
"loss": 4.9858,
|
8584 |
+
"step": 1225
|
8585 |
+
},
|
8586 |
+
{
|
8587 |
+
"epoch": 0.05009909486545573,
|
8588 |
+
"grad_norm": 13.496818542480469,
|
8589 |
+
"learning_rate": 1.066128028645471e-05,
|
8590 |
+
"loss": 6.3312,
|
8591 |
+
"step": 1226
|
8592 |
+
},
|
8593 |
+
{
|
8594 |
+
"epoch": 0.05013995872749934,
|
8595 |
+
"grad_norm": 11.86417293548584,
|
8596 |
+
"learning_rate": 1.059876863568356e-05,
|
8597 |
+
"loss": 5.7305,
|
8598 |
+
"step": 1227
|
8599 |
+
},
|
8600 |
+
{
|
8601 |
+
"epoch": 0.050180822589542935,
|
8602 |
+
"grad_norm": 12.633896827697754,
|
8603 |
+
"learning_rate": 1.0536419056376568e-05,
|
8604 |
+
"loss": 5.9026,
|
8605 |
+
"step": 1228
|
8606 |
+
},
|
8607 |
+
{
|
8608 |
+
"epoch": 0.05022168645158654,
|
8609 |
+
"grad_norm": 12.399638175964355,
|
8610 |
+
"learning_rate": 1.0474231805000018e-05,
|
8611 |
+
"loss": 5.5516,
|
8612 |
+
"step": 1229
|
8613 |
+
},
|
8614 |
+
{
|
8615 |
+
"epoch": 0.05026255031363014,
|
8616 |
+
"grad_norm": 12.876869201660156,
|
8617 |
+
"learning_rate": 1.0412207137352504e-05,
|
8618 |
+
"loss": 5.7214,
|
8619 |
+
"step": 1230
|
8620 |
+
},
|
8621 |
+
{
|
8622 |
+
"epoch": 0.050303414175673744,
|
8623 |
+
"grad_norm": 14.316557884216309,
|
8624 |
+
"learning_rate": 1.035034530856382e-05,
|
8625 |
+
"loss": 5.3783,
|
8626 |
+
"step": 1231
|
8627 |
+
},
|
8628 |
+
{
|
8629 |
+
"epoch": 0.05034427803771734,
|
8630 |
+
"grad_norm": 12.174219131469727,
|
8631 |
+
"learning_rate": 1.028864657309398e-05,
|
8632 |
+
"loss": 5.5782,
|
8633 |
+
"step": 1232
|
8634 |
+
},
|
8635 |
+
{
|
8636 |
+
"epoch": 0.05038514189976095,
|
8637 |
+
"grad_norm": 13.287007331848145,
|
8638 |
+
"learning_rate": 1.0227111184732113e-05,
|
8639 |
+
"loss": 6.2739,
|
8640 |
+
"step": 1233
|
8641 |
+
},
|
8642 |
+
{
|
8643 |
+
"epoch": 0.050426005761804546,
|
8644 |
+
"grad_norm": 11.273058891296387,
|
8645 |
+
"learning_rate": 1.016573939659543e-05,
|
8646 |
+
"loss": 5.3985,
|
8647 |
+
"step": 1234
|
8648 |
+
},
|
8649 |
+
{
|
8650 |
+
"epoch": 0.05046686962384815,
|
8651 |
+
"grad_norm": 14.127720832824707,
|
8652 |
+
"learning_rate": 1.0104531461128226e-05,
|
8653 |
+
"loss": 5.8588,
|
8654 |
+
"step": 1235
|
8655 |
+
},
|
8656 |
+
{
|
8657 |
+
"epoch": 0.05050773348589175,
|
8658 |
+
"grad_norm": 15.278068542480469,
|
8659 |
+
"learning_rate": 1.0043487630100812e-05,
|
8660 |
+
"loss": 5.7859,
|
8661 |
+
"step": 1236
|
8662 |
+
},
|
8663 |
+
{
|
8664 |
+
"epoch": 0.050548597347935355,
|
8665 |
+
"grad_norm": 13.765253067016602,
|
8666 |
+
"learning_rate": 9.982608154608446e-06,
|
8667 |
+
"loss": 6.1172,
|
8668 |
+
"step": 1237
|
8669 |
+
},
|
8670 |
+
{
|
8671 |
+
"epoch": 0.05058946120997895,
|
8672 |
+
"grad_norm": 14.134552001953125,
|
8673 |
+
"learning_rate": 9.921893285070361e-06,
|
8674 |
+
"loss": 5.7363,
|
8675 |
+
"step": 1238
|
8676 |
+
},
|
8677 |
+
{
|
8678 |
+
"epoch": 0.05063032507202256,
|
8679 |
+
"grad_norm": 15.40446949005127,
|
8680 |
+
"learning_rate": 9.861343271228678e-06,
|
8681 |
+
"loss": 5.7238,
|
8682 |
+
"step": 1239
|
8683 |
+
},
|
8684 |
+
{
|
8685 |
+
"epoch": 0.05067118893406616,
|
8686 |
+
"grad_norm": 14.402509689331055,
|
8687 |
+
"learning_rate": 9.800958362147434e-06,
|
8688 |
+
"loss": 5.7395,
|
8689 |
+
"step": 1240
|
8690 |
+
},
|
8691 |
+
{
|
8692 |
+
"epoch": 0.05071205279610976,
|
8693 |
+
"grad_norm": 14.785293579101562,
|
8694 |
+
"learning_rate": 9.740738806211542e-06,
|
8695 |
+
"loss": 5.7468,
|
8696 |
+
"step": 1241
|
8697 |
+
},
|
8698 |
+
{
|
8699 |
+
"epoch": 0.05075291665815336,
|
8700 |
+
"grad_norm": 15.105719566345215,
|
8701 |
+
"learning_rate": 9.680684851125715e-06,
|
8702 |
+
"loss": 6.108,
|
8703 |
+
"step": 1242
|
8704 |
+
},
|
8705 |
+
{
|
8706 |
+
"epoch": 0.050793780520196966,
|
8707 |
+
"grad_norm": 16.200971603393555,
|
8708 |
+
"learning_rate": 9.620796743913513e-06,
|
8709 |
+
"loss": 5.3941,
|
8710 |
+
"step": 1243
|
8711 |
+
},
|
8712 |
+
{
|
8713 |
+
"epoch": 0.050834644382240564,
|
8714 |
+
"grad_norm": 19.255962371826172,
|
8715 |
+
"learning_rate": 9.561074730916319e-06,
|
8716 |
+
"loss": 6.1458,
|
8717 |
+
"step": 1244
|
8718 |
+
},
|
8719 |
+
{
|
8720 |
+
"epoch": 0.05087550824428417,
|
8721 |
+
"grad_norm": 16.267532348632812,
|
8722 |
+
"learning_rate": 9.501519057792274e-06,
|
8723 |
+
"loss": 4.9393,
|
8724 |
+
"step": 1245
|
8725 |
+
},
|
8726 |
+
{
|
8727 |
+
"epoch": 0.05091637210632777,
|
8728 |
+
"grad_norm": 18.626277923583984,
|
8729 |
+
"learning_rate": 9.442129969515351e-06,
|
8730 |
+
"loss": 6.3728,
|
8731 |
+
"step": 1246
|
8732 |
+
},
|
8733 |
+
{
|
8734 |
+
"epoch": 0.05095723596837137,
|
8735 |
+
"grad_norm": 17.49756622314453,
|
8736 |
+
"learning_rate": 9.382907710374256e-06,
|
8737 |
+
"loss": 6.9734,
|
8738 |
+
"step": 1247
|
8739 |
+
},
|
8740 |
+
{
|
8741 |
+
"epoch": 0.05099809983041497,
|
8742 |
+
"grad_norm": 16.13072967529297,
|
8743 |
+
"learning_rate": 9.323852523971472e-06,
|
8744 |
+
"loss": 5.5157,
|
8745 |
+
"step": 1248
|
8746 |
+
},
|
8747 |
+
{
|
8748 |
+
"epoch": 0.051038963692458576,
|
8749 |
+
"grad_norm": 17.544748306274414,
|
8750 |
+
"learning_rate": 9.264964653222292e-06,
|
8751 |
+
"loss": 6.2915,
|
8752 |
+
"step": 1249
|
8753 |
+
},
|
8754 |
+
{
|
8755 |
+
"epoch": 0.051079827554502175,
|
8756 |
+
"grad_norm": 35.300872802734375,
|
8757 |
+
"learning_rate": 9.20624434035373e-06,
|
8758 |
+
"loss": 6.9662,
|
8759 |
+
"step": 1250
|
8760 |
+
},
|
8761 |
+
{
|
8762 |
+
"epoch": 0.05112069141654578,
|
8763 |
+
"grad_norm": 9.494024276733398,
|
8764 |
+
"learning_rate": 9.147691826903593e-06,
|
8765 |
+
"loss": 4.8124,
|
8766 |
+
"step": 1251
|
8767 |
+
},
|
8768 |
+
{
|
8769 |
+
"epoch": 0.05116155527858938,
|
8770 |
+
"grad_norm": 12.89008617401123,
|
8771 |
+
"learning_rate": 9.089307353719479e-06,
|
8772 |
+
"loss": 5.4176,
|
8773 |
+
"step": 1252
|
8774 |
+
},
|
8775 |
+
{
|
8776 |
+
"epoch": 0.05120241914063298,
|
8777 |
+
"grad_norm": 10.269113540649414,
|
8778 |
+
"learning_rate": 9.031091160957772e-06,
|
8779 |
+
"loss": 5.2947,
|
8780 |
+
"step": 1253
|
8781 |
+
},
|
8782 |
+
{
|
8783 |
+
"epoch": 0.05124328300267658,
|
8784 |
+
"grad_norm": 11.057022094726562,
|
8785 |
+
"learning_rate": 8.973043488082649e-06,
|
8786 |
+
"loss": 5.8336,
|
8787 |
+
"step": 1254
|
8788 |
+
},
|
8789 |
+
{
|
8790 |
+
"epoch": 0.05128414686472019,
|
8791 |
+
"grad_norm": 11.286837577819824,
|
8792 |
+
"learning_rate": 8.915164573865109e-06,
|
8793 |
+
"loss": 6.355,
|
8794 |
+
"step": 1255
|
8795 |
+
},
|
8796 |
+
{
|
8797 |
+
"epoch": 0.051325010726763785,
|
8798 |
+
"grad_norm": 10.454761505126953,
|
8799 |
+
"learning_rate": 8.857454656381952e-06,
|
8800 |
+
"loss": 5.5888,
|
8801 |
+
"step": 1256
|
8802 |
+
},
|
8803 |
+
{
|
8804 |
+
"epoch": 0.05136587458880739,
|
8805 |
+
"grad_norm": 11.674518585205078,
|
8806 |
+
"learning_rate": 8.799913973014923e-06,
|
8807 |
+
"loss": 6.07,
|
8808 |
+
"step": 1257
|
8809 |
+
},
|
8810 |
+
{
|
8811 |
+
"epoch": 0.05140673845085099,
|
8812 |
+
"grad_norm": 12.686792373657227,
|
8813 |
+
"learning_rate": 8.742542760449563e-06,
|
8814 |
+
"loss": 5.7491,
|
8815 |
+
"step": 1258
|
8816 |
+
},
|
8817 |
+
{
|
8818 |
+
"epoch": 0.051447602312894594,
|
8819 |
+
"grad_norm": 11.083107948303223,
|
8820 |
+
"learning_rate": 8.68534125467434e-06,
|
8821 |
+
"loss": 5.3407,
|
8822 |
+
"step": 1259
|
8823 |
+
},
|
8824 |
+
{
|
8825 |
+
"epoch": 0.05148846617493819,
|
8826 |
+
"grad_norm": 11.931784629821777,
|
8827 |
+
"learning_rate": 8.628309690979657e-06,
|
8828 |
+
"loss": 6.3415,
|
8829 |
+
"step": 1260
|
8830 |
+
},
|
8831 |
+
{
|
8832 |
+
"epoch": 0.0515293300369818,
|
8833 |
+
"grad_norm": 10.646793365478516,
|
8834 |
+
"learning_rate": 8.571448303956898e-06,
|
8835 |
+
"loss": 5.1177,
|
8836 |
+
"step": 1261
|
8837 |
+
},
|
8838 |
+
{
|
8839 |
+
"epoch": 0.051570193899025396,
|
8840 |
+
"grad_norm": 11.873929023742676,
|
8841 |
+
"learning_rate": 8.514757327497446e-06,
|
8842 |
+
"loss": 5.7963,
|
8843 |
+
"step": 1262
|
8844 |
+
},
|
8845 |
+
{
|
8846 |
+
"epoch": 0.051611057761069,
|
8847 |
+
"grad_norm": 13.434286117553711,
|
8848 |
+
"learning_rate": 8.458236994791712e-06,
|
8849 |
+
"loss": 6.3954,
|
8850 |
+
"step": 1263
|
8851 |
+
},
|
8852 |
+
{
|
8853 |
+
"epoch": 0.0516519216231126,
|
8854 |
+
"grad_norm": 12.250168800354004,
|
8855 |
+
"learning_rate": 8.401887538328196e-06,
|
8856 |
+
"loss": 4.7648,
|
8857 |
+
"step": 1264
|
8858 |
+
},
|
8859 |
+
{
|
8860 |
+
"epoch": 0.051692785485156205,
|
8861 |
+
"grad_norm": 15.333703994750977,
|
8862 |
+
"learning_rate": 8.345709189892503e-06,
|
8863 |
+
"loss": 6.504,
|
8864 |
+
"step": 1265
|
8865 |
+
},
|
8866 |
+
{
|
8867 |
+
"epoch": 0.0517336493471998,
|
8868 |
+
"grad_norm": 13.18412971496582,
|
8869 |
+
"learning_rate": 8.28970218056645e-06,
|
8870 |
+
"loss": 6.0472,
|
8871 |
+
"step": 1266
|
8872 |
+
},
|
8873 |
+
{
|
8874 |
+
"epoch": 0.05177451320924341,
|
8875 |
+
"grad_norm": 11.889876365661621,
|
8876 |
+
"learning_rate": 8.23386674072703e-06,
|
8877 |
+
"loss": 5.4082,
|
8878 |
+
"step": 1267
|
8879 |
+
},
|
8880 |
+
{
|
8881 |
+
"epoch": 0.05181537707128701,
|
8882 |
+
"grad_norm": 13.549942970275879,
|
8883 |
+
"learning_rate": 8.17820310004555e-06,
|
8884 |
+
"loss": 6.1495,
|
8885 |
+
"step": 1268
|
8886 |
+
},
|
8887 |
+
{
|
8888 |
+
"epoch": 0.05185624093333061,
|
8889 |
+
"grad_norm": 13.083065032958984,
|
8890 |
+
"learning_rate": 8.122711487486595e-06,
|
8891 |
+
"loss": 5.8732,
|
8892 |
+
"step": 1269
|
8893 |
+
},
|
8894 |
+
{
|
8895 |
+
"epoch": 0.05189710479537421,
|
8896 |
+
"grad_norm": 12.908683776855469,
|
8897 |
+
"learning_rate": 8.0673921313072e-06,
|
8898 |
+
"loss": 6.633,
|
8899 |
+
"step": 1270
|
8900 |
+
},
|
8901 |
+
{
|
8902 |
+
"epoch": 0.051937968657417816,
|
8903 |
+
"grad_norm": 16.046030044555664,
|
8904 |
+
"learning_rate": 8.0122452590558e-06,
|
8905 |
+
"loss": 6.5138,
|
8906 |
+
"step": 1271
|
8907 |
+
},
|
8908 |
+
{
|
8909 |
+
"epoch": 0.051978832519461414,
|
8910 |
+
"grad_norm": 12.690372467041016,
|
8911 |
+
"learning_rate": 7.957271097571334e-06,
|
8912 |
+
"loss": 5.9849,
|
8913 |
+
"step": 1272
|
8914 |
+
},
|
8915 |
+
{
|
8916 |
+
"epoch": 0.05201969638150502,
|
8917 |
+
"grad_norm": 13.320104598999023,
|
8918 |
+
"learning_rate": 7.90246987298236e-06,
|
8919 |
+
"loss": 5.2731,
|
8920 |
+
"step": 1273
|
8921 |
+
},
|
8922 |
+
{
|
8923 |
+
"epoch": 0.05206056024354862,
|
8924 |
+
"grad_norm": 11.995138168334961,
|
8925 |
+
"learning_rate": 7.847841810706074e-06,
|
8926 |
+
"loss": 5.4736,
|
8927 |
+
"step": 1274
|
8928 |
+
},
|
8929 |
+
{
|
8930 |
+
"epoch": 0.05210142410559222,
|
8931 |
+
"grad_norm": 14.164356231689453,
|
8932 |
+
"learning_rate": 7.793387135447372e-06,
|
8933 |
+
"loss": 5.7038,
|
8934 |
+
"step": 1275
|
8935 |
+
},
|
8936 |
+
{
|
8937 |
+
"epoch": 0.05214228796763582,
|
8938 |
+
"grad_norm": 15.582404136657715,
|
8939 |
+
"learning_rate": 7.739106071197954e-06,
|
8940 |
+
"loss": 5.8561,
|
8941 |
+
"step": 1276
|
8942 |
+
},
|
8943 |
+
{
|
8944 |
+
"epoch": 0.052183151829679426,
|
8945 |
+
"grad_norm": 10.692551612854004,
|
8946 |
+
"learning_rate": 7.684998841235391e-06,
|
8947 |
+
"loss": 5.0494,
|
8948 |
+
"step": 1277
|
8949 |
+
},
|
8950 |
+
{
|
8951 |
+
"epoch": 0.052224015691723025,
|
8952 |
+
"grad_norm": 14.861363410949707,
|
8953 |
+
"learning_rate": 7.631065668122233e-06,
|
8954 |
+
"loss": 7.0001,
|
8955 |
+
"step": 1278
|
8956 |
+
},
|
8957 |
+
{
|
8958 |
+
"epoch": 0.05226487955376662,
|
8959 |
+
"grad_norm": 12.813691139221191,
|
8960 |
+
"learning_rate": 7.577306773705062e-06,
|
8961 |
+
"loss": 5.3477,
|
8962 |
+
"step": 1279
|
8963 |
+
},
|
8964 |
+
{
|
8965 |
+
"epoch": 0.05230574341581023,
|
8966 |
+
"grad_norm": 13.51701831817627,
|
8967 |
+
"learning_rate": 7.523722379113579e-06,
|
8968 |
+
"loss": 6.1212,
|
8969 |
+
"step": 1280
|
8970 |
+
},
|
8971 |
+
{
|
8972 |
+
"epoch": 0.05234660727785383,
|
8973 |
+
"grad_norm": 33.655460357666016,
|
8974 |
+
"learning_rate": 7.470312704759697e-06,
|
8975 |
+
"loss": 6.4791,
|
8976 |
+
"step": 1281
|
8977 |
+
},
|
8978 |
+
{
|
8979 |
+
"epoch": 0.05238747113989743,
|
8980 |
+
"grad_norm": 12.676177024841309,
|
8981 |
+
"learning_rate": 7.417077970336678e-06,
|
8982 |
+
"loss": 5.5461,
|
8983 |
+
"step": 1282
|
8984 |
+
},
|
8985 |
+
{
|
8986 |
+
"epoch": 0.05242833500194103,
|
8987 |
+
"grad_norm": 12.751127243041992,
|
8988 |
+
"learning_rate": 7.3640183948181506e-06,
|
8989 |
+
"loss": 6.1601,
|
8990 |
+
"step": 1283
|
8991 |
+
},
|
8992 |
+
{
|
8993 |
+
"epoch": 0.052469198863984635,
|
8994 |
+
"grad_norm": 13.632148742675781,
|
8995 |
+
"learning_rate": 7.311134196457281e-06,
|
8996 |
+
"loss": 6.3303,
|
8997 |
+
"step": 1284
|
8998 |
+
},
|
8999 |
+
{
|
9000 |
+
"epoch": 0.052510062726028234,
|
9001 |
+
"grad_norm": 12.904389381408691,
|
9002 |
+
"learning_rate": 7.258425592785839e-06,
|
9003 |
+
"loss": 6.2029,
|
9004 |
+
"step": 1285
|
9005 |
+
},
|
9006 |
+
{
|
9007 |
+
"epoch": 0.05255092658807184,
|
9008 |
+
"grad_norm": 13.69832706451416,
|
9009 |
+
"learning_rate": 7.205892800613284e-06,
|
9010 |
+
"loss": 6.3765,
|
9011 |
+
"step": 1286
|
9012 |
+
},
|
9013 |
+
{
|
9014 |
+
"epoch": 0.05259179045011544,
|
9015 |
+
"grad_norm": 14.280808448791504,
|
9016 |
+
"learning_rate": 7.153536036025948e-06,
|
9017 |
+
"loss": 6.8123,
|
9018 |
+
"step": 1287
|
9019 |
+
},
|
9020 |
+
{
|
9021 |
+
"epoch": 0.05263265431215904,
|
9022 |
+
"grad_norm": 17.279685974121094,
|
9023 |
+
"learning_rate": 7.10135551438606e-06,
|
9024 |
+
"loss": 7.0462,
|
9025 |
+
"step": 1288
|
9026 |
+
},
|
9027 |
+
{
|
9028 |
+
"epoch": 0.05267351817420264,
|
9029 |
+
"grad_norm": 16.416654586791992,
|
9030 |
+
"learning_rate": 7.049351450330915e-06,
|
9031 |
+
"loss": 7.4677,
|
9032 |
+
"step": 1289
|
9033 |
+
},
|
9034 |
+
{
|
9035 |
+
"epoch": 0.052714382036246246,
|
9036 |
+
"grad_norm": 18.79620361328125,
|
9037 |
+
"learning_rate": 6.9975240577719645e-06,
|
9038 |
+
"loss": 6.447,
|
9039 |
+
"step": 1290
|
9040 |
+
},
|
9041 |
+
{
|
9042 |
+
"epoch": 0.052755245898289845,
|
9043 |
+
"grad_norm": 18.415451049804688,
|
9044 |
+
"learning_rate": 6.945873549893961e-06,
|
9045 |
+
"loss": 6.665,
|
9046 |
+
"step": 1291
|
9047 |
+
},
|
9048 |
+
{
|
9049 |
+
"epoch": 0.05279610976033345,
|
9050 |
+
"grad_norm": 15.684821128845215,
|
9051 |
+
"learning_rate": 6.894400139154056e-06,
|
9052 |
+
"loss": 6.0481,
|
9053 |
+
"step": 1292
|
9054 |
+
},
|
9055 |
+
{
|
9056 |
+
"epoch": 0.05283697362237705,
|
9057 |
+
"grad_norm": 16.285133361816406,
|
9058 |
+
"learning_rate": 6.8431040372809285e-06,
|
9059 |
+
"loss": 5.693,
|
9060 |
+
"step": 1293
|
9061 |
+
},
|
9062 |
+
{
|
9063 |
+
"epoch": 0.05287783748442065,
|
9064 |
+
"grad_norm": 15.192296981811523,
|
9065 |
+
"learning_rate": 6.79198545527393e-06,
|
9066 |
+
"loss": 6.244,
|
9067 |
+
"step": 1294
|
9068 |
+
},
|
9069 |
+
{
|
9070 |
+
"epoch": 0.05291870134646425,
|
9071 |
+
"grad_norm": 17.961584091186523,
|
9072 |
+
"learning_rate": 6.741044603402213e-06,
|
9073 |
+
"loss": 5.5759,
|
9074 |
+
"step": 1295
|
9075 |
+
},
|
9076 |
+
{
|
9077 |
+
"epoch": 0.05295956520850786,
|
9078 |
+
"grad_norm": 13.980565071105957,
|
9079 |
+
"learning_rate": 6.6902816912038816e-06,
|
9080 |
+
"loss": 4.6304,
|
9081 |
+
"step": 1296
|
9082 |
+
},
|
9083 |
+
{
|
9084 |
+
"epoch": 0.053000429070551455,
|
9085 |
+
"grad_norm": 19.309667587280273,
|
9086 |
+
"learning_rate": 6.6396969274850675e-06,
|
9087 |
+
"loss": 6.6212,
|
9088 |
+
"step": 1297
|
9089 |
+
},
|
9090 |
+
{
|
9091 |
+
"epoch": 0.05304129293259506,
|
9092 |
+
"grad_norm": 19.81852149963379,
|
9093 |
+
"learning_rate": 6.589290520319125e-06,
|
9094 |
+
"loss": 7.4413,
|
9095 |
+
"step": 1298
|
9096 |
+
},
|
9097 |
+
{
|
9098 |
+
"epoch": 0.05308215679463866,
|
9099 |
+
"grad_norm": 17.609085083007812,
|
9100 |
+
"learning_rate": 6.539062677045782e-06,
|
9101 |
+
"loss": 5.6315,
|
9102 |
+
"step": 1299
|
9103 |
+
},
|
9104 |
+
{
|
9105 |
+
"epoch": 0.053123020656682264,
|
9106 |
+
"grad_norm": 26.30453109741211,
|
9107 |
+
"learning_rate": 6.489013604270278e-06,
|
9108 |
+
"loss": 8.1746,
|
9109 |
+
"step": 1300
|
9110 |
+
},
|
9111 |
+
{
|
9112 |
+
"epoch": 0.05316388451872586,
|
9113 |
+
"grad_norm": 11.81814193725586,
|
9114 |
+
"learning_rate": 6.439143507862466e-06,
|
9115 |
+
"loss": 6.7105,
|
9116 |
+
"step": 1301
|
9117 |
+
},
|
9118 |
+
{
|
9119 |
+
"epoch": 0.05320474838076947,
|
9120 |
+
"grad_norm": 9.656513214111328,
|
9121 |
+
"learning_rate": 6.38945259295603e-06,
|
9122 |
+
"loss": 5.1346,
|
9123 |
+
"step": 1302
|
9124 |
+
},
|
9125 |
+
{
|
9126 |
+
"epoch": 0.053245612242813066,
|
9127 |
+
"grad_norm": 10.611899375915527,
|
9128 |
+
"learning_rate": 6.3399410639475966e-06,
|
9129 |
+
"loss": 5.8584,
|
9130 |
+
"step": 1303
|
9131 |
+
},
|
9132 |
+
{
|
9133 |
+
"epoch": 0.05328647610485667,
|
9134 |
+
"grad_norm": 9.092964172363281,
|
9135 |
+
"learning_rate": 6.290609124495939e-06,
|
9136 |
+
"loss": 4.9972,
|
9137 |
+
"step": 1304
|
9138 |
+
},
|
9139 |
+
{
|
9140 |
+
"epoch": 0.05332733996690027,
|
9141 |
+
"grad_norm": 11.171881675720215,
|
9142 |
+
"learning_rate": 6.241456977521115e-06,
|
9143 |
+
"loss": 6.2399,
|
9144 |
+
"step": 1305
|
9145 |
+
},
|
9146 |
+
{
|
9147 |
+
"epoch": 0.053368203828943875,
|
9148 |
+
"grad_norm": 9.803963661193848,
|
9149 |
+
"learning_rate": 6.19248482520361e-06,
|
9150 |
+
"loss": 5.4172,
|
9151 |
+
"step": 1306
|
9152 |
+
},
|
9153 |
+
{
|
9154 |
+
"epoch": 0.05340906769098747,
|
9155 |
+
"grad_norm": 12.769637107849121,
|
9156 |
+
"learning_rate": 6.14369286898352e-06,
|
9157 |
+
"loss": 5.907,
|
9158 |
+
"step": 1307
|
9159 |
+
},
|
9160 |
+
{
|
9161 |
+
"epoch": 0.05344993155303108,
|
9162 |
+
"grad_norm": 12.696666717529297,
|
9163 |
+
"learning_rate": 6.0950813095597695e-06,
|
9164 |
+
"loss": 5.905,
|
9165 |
+
"step": 1308
|
9166 |
+
},
|
9167 |
+
{
|
9168 |
+
"epoch": 0.05349079541507468,
|
9169 |
+
"grad_norm": 11.773411750793457,
|
9170 |
+
"learning_rate": 6.04665034688921e-06,
|
9171 |
+
"loss": 5.8702,
|
9172 |
+
"step": 1309
|
9173 |
+
},
|
9174 |
+
{
|
9175 |
+
"epoch": 0.05353165927711828,
|
9176 |
+
"grad_norm": 9.899693489074707,
|
9177 |
+
"learning_rate": 5.998400180185837e-06,
|
9178 |
+
"loss": 4.9315,
|
9179 |
+
"step": 1310
|
9180 |
+
},
|
9181 |
+
{
|
9182 |
+
"epoch": 0.05357252313916188,
|
9183 |
+
"grad_norm": 15.44206714630127,
|
9184 |
+
"learning_rate": 5.9503310079199734e-06,
|
9185 |
+
"loss": 6.3004,
|
9186 |
+
"step": 1311
|
9187 |
+
},
|
9188 |
+
{
|
9189 |
+
"epoch": 0.053613387001205486,
|
9190 |
+
"grad_norm": 12.296639442443848,
|
9191 |
+
"learning_rate": 5.902443027817472e-06,
|
9192 |
+
"loss": 6.0329,
|
9193 |
+
"step": 1312
|
9194 |
+
},
|
9195 |
+
{
|
9196 |
+
"epoch": 0.053654250863249084,
|
9197 |
+
"grad_norm": 10.341333389282227,
|
9198 |
+
"learning_rate": 5.854736436858838e-06,
|
9199 |
+
"loss": 5.3143,
|
9200 |
+
"step": 1313
|
9201 |
+
},
|
9202 |
+
{
|
9203 |
+
"epoch": 0.05369511472529269,
|
9204 |
+
"grad_norm": 9.71491813659668,
|
9205 |
+
"learning_rate": 5.807211431278469e-06,
|
9206 |
+
"loss": 5.2402,
|
9207 |
+
"step": 1314
|
9208 |
+
},
|
9209 |
+
{
|
9210 |
+
"epoch": 0.05373597858733629,
|
9211 |
+
"grad_norm": 11.967327117919922,
|
9212 |
+
"learning_rate": 5.759868206563834e-06,
|
9213 |
+
"loss": 6.1597,
|
9214 |
+
"step": 1315
|
9215 |
+
},
|
9216 |
+
{
|
9217 |
+
"epoch": 0.05377684244937989,
|
9218 |
+
"grad_norm": 12.041747093200684,
|
9219 |
+
"learning_rate": 5.712706957454689e-06,
|
9220 |
+
"loss": 6.1137,
|
9221 |
+
"step": 1316
|
9222 |
+
},
|
9223 |
+
{
|
9224 |
+
"epoch": 0.05381770631142349,
|
9225 |
+
"grad_norm": 11.523645401000977,
|
9226 |
+
"learning_rate": 5.6657278779422564e-06,
|
9227 |
+
"loss": 4.7146,
|
9228 |
+
"step": 1317
|
9229 |
+
},
|
9230 |
+
{
|
9231 |
+
"epoch": 0.053858570173467096,
|
9232 |
+
"grad_norm": 11.76720905303955,
|
9233 |
+
"learning_rate": 5.618931161268409e-06,
|
9234 |
+
"loss": 4.8229,
|
9235 |
+
"step": 1318
|
9236 |
+
},
|
9237 |
+
{
|
9238 |
+
"epoch": 0.053899434035510695,
|
9239 |
+
"grad_norm": 14.665324211120605,
|
9240 |
+
"learning_rate": 5.5723169999248985e-06,
|
9241 |
+
"loss": 6.3562,
|
9242 |
+
"step": 1319
|
9243 |
+
},
|
9244 |
+
{
|
9245 |
+
"epoch": 0.0539402978975543,
|
9246 |
+
"grad_norm": 12.501147270202637,
|
9247 |
+
"learning_rate": 5.52588558565259e-06,
|
9248 |
+
"loss": 5.7088,
|
9249 |
+
"step": 1320
|
9250 |
+
},
|
9251 |
+
{
|
9252 |
+
"epoch": 0.0539811617595979,
|
9253 |
+
"grad_norm": 11.123631477355957,
|
9254 |
+
"learning_rate": 5.479637109440605e-06,
|
9255 |
+
"loss": 6.0012,
|
9256 |
+
"step": 1321
|
9257 |
+
},
|
9258 |
+
{
|
9259 |
+
"epoch": 0.0540220256216415,
|
9260 |
+
"grad_norm": 13.458786964416504,
|
9261 |
+
"learning_rate": 5.433571761525608e-06,
|
9262 |
+
"loss": 5.2179,
|
9263 |
+
"step": 1322
|
9264 |
+
},
|
9265 |
+
{
|
9266 |
+
"epoch": 0.0540628894836851,
|
9267 |
+
"grad_norm": 12.442361831665039,
|
9268 |
+
"learning_rate": 5.387689731390971e-06,
|
9269 |
+
"loss": 5.8886,
|
9270 |
+
"step": 1323
|
9271 |
+
},
|
9272 |
+
{
|
9273 |
+
"epoch": 0.05410375334572871,
|
9274 |
+
"grad_norm": 12.343889236450195,
|
9275 |
+
"learning_rate": 5.3419912077660115e-06,
|
9276 |
+
"loss": 5.4713,
|
9277 |
+
"step": 1324
|
9278 |
+
},
|
9279 |
+
{
|
9280 |
+
"epoch": 0.054144617207772305,
|
9281 |
+
"grad_norm": 15.862980842590332,
|
9282 |
+
"learning_rate": 5.296476378625237e-06,
|
9283 |
+
"loss": 6.3647,
|
9284 |
+
"step": 1325
|
9285 |
+
},
|
9286 |
+
{
|
9287 |
+
"epoch": 0.05418548106981591,
|
9288 |
+
"grad_norm": 14.279685020446777,
|
9289 |
+
"learning_rate": 5.251145431187532e-06,
|
9290 |
+
"loss": 6.0743,
|
9291 |
+
"step": 1326
|
9292 |
+
},
|
9293 |
+
{
|
9294 |
+
"epoch": 0.05422634493185951,
|
9295 |
+
"grad_norm": 11.489834785461426,
|
9296 |
+
"learning_rate": 5.205998551915431e-06,
|
9297 |
+
"loss": 5.7051,
|
9298 |
+
"step": 1327
|
9299 |
+
},
|
9300 |
+
{
|
9301 |
+
"epoch": 0.054267208793903114,
|
9302 |
+
"grad_norm": 11.323694229125977,
|
9303 |
+
"learning_rate": 5.161035926514307e-06,
|
9304 |
+
"loss": 5.4545,
|
9305 |
+
"step": 1328
|
9306 |
+
},
|
9307 |
+
{
|
9308 |
+
"epoch": 0.05430807265594671,
|
9309 |
+
"grad_norm": 12.028011322021484,
|
9310 |
+
"learning_rate": 5.116257739931662e-06,
|
9311 |
+
"loss": 5.8448,
|
9312 |
+
"step": 1329
|
9313 |
+
},
|
9314 |
+
{
|
9315 |
+
"epoch": 0.05434893651799032,
|
9316 |
+
"grad_norm": 12.499610900878906,
|
9317 |
+
"learning_rate": 5.071664176356295e-06,
|
9318 |
+
"loss": 5.7263,
|
9319 |
+
"step": 1330
|
9320 |
+
},
|
9321 |
+
{
|
9322 |
+
"epoch": 0.054389800380033916,
|
9323 |
+
"grad_norm": 13.89228630065918,
|
9324 |
+
"learning_rate": 5.027255419217602e-06,
|
9325 |
+
"loss": 6.1402,
|
9326 |
+
"step": 1331
|
9327 |
+
},
|
9328 |
+
{
|
9329 |
+
"epoch": 0.05443066424207752,
|
9330 |
+
"grad_norm": 15.132948875427246,
|
9331 |
+
"learning_rate": 4.983031651184828e-06,
|
9332 |
+
"loss": 5.8142,
|
9333 |
+
"step": 1332
|
9334 |
+
},
|
9335 |
+
{
|
9336 |
+
"epoch": 0.05447152810412112,
|
9337 |
+
"grad_norm": 12.604890823364258,
|
9338 |
+
"learning_rate": 4.938993054166235e-06,
|
9339 |
+
"loss": 5.3971,
|
9340 |
+
"step": 1333
|
9341 |
+
},
|
9342 |
+
{
|
9343 |
+
"epoch": 0.054512391966164725,
|
9344 |
+
"grad_norm": 13.132485389709473,
|
9345 |
+
"learning_rate": 4.89513980930848e-06,
|
9346 |
+
"loss": 6.1247,
|
9347 |
+
"step": 1334
|
9348 |
+
},
|
9349 |
+
{
|
9350 |
+
"epoch": 0.05455325582820832,
|
9351 |
+
"grad_norm": 15.888174057006836,
|
9352 |
+
"learning_rate": 4.85147209699574e-06,
|
9353 |
+
"loss": 6.4279,
|
9354 |
+
"step": 1335
|
9355 |
+
},
|
9356 |
+
{
|
9357 |
+
"epoch": 0.05459411969025193,
|
9358 |
+
"grad_norm": 13.905696868896484,
|
9359 |
+
"learning_rate": 4.807990096849052e-06,
|
9360 |
+
"loss": 6.5553,
|
9361 |
+
"step": 1336
|
9362 |
+
},
|
9363 |
+
{
|
9364 |
+
"epoch": 0.05463498355229553,
|
9365 |
+
"grad_norm": 15.824579238891602,
|
9366 |
+
"learning_rate": 4.764693987725555e-06,
|
9367 |
+
"loss": 5.8727,
|
9368 |
+
"step": 1337
|
9369 |
+
},
|
9370 |
+
{
|
9371 |
+
"epoch": 0.05467584741433913,
|
9372 |
+
"grad_norm": 12.343487739562988,
|
9373 |
+
"learning_rate": 4.721583947717767e-06,
|
9374 |
+
"loss": 5.3657,
|
9375 |
+
"step": 1338
|
9376 |
+
},
|
9377 |
+
{
|
9378 |
+
"epoch": 0.05471671127638273,
|
9379 |
+
"grad_norm": 16.064315795898438,
|
9380 |
+
"learning_rate": 4.678660154152803e-06,
|
9381 |
+
"loss": 6.3825,
|
9382 |
+
"step": 1339
|
9383 |
+
},
|
9384 |
+
{
|
9385 |
+
"epoch": 0.054757575138426336,
|
9386 |
+
"grad_norm": 15.64239501953125,
|
9387 |
+
"learning_rate": 4.635922783591695e-06,
|
9388 |
+
"loss": 6.443,
|
9389 |
+
"step": 1340
|
9390 |
+
},
|
9391 |
+
{
|
9392 |
+
"epoch": 0.054798439000469934,
|
9393 |
+
"grad_norm": 14.100479125976562,
|
9394 |
+
"learning_rate": 4.593372011828645e-06,
|
9395 |
+
"loss": 4.8361,
|
9396 |
+
"step": 1341
|
9397 |
+
},
|
9398 |
+
{
|
9399 |
+
"epoch": 0.05483930286251354,
|
9400 |
+
"grad_norm": 14.435734748840332,
|
9401 |
+
"learning_rate": 4.5510080138903175e-06,
|
9402 |
+
"loss": 6.1017,
|
9403 |
+
"step": 1342
|
9404 |
+
},
|
9405 |
+
{
|
9406 |
+
"epoch": 0.05488016672455714,
|
9407 |
+
"grad_norm": 15.61367416381836,
|
9408 |
+
"learning_rate": 4.50883096403511e-06,
|
9409 |
+
"loss": 6.8161,
|
9410 |
+
"step": 1343
|
9411 |
+
},
|
9412 |
+
{
|
9413 |
+
"epoch": 0.05492103058660074,
|
9414 |
+
"grad_norm": 14.850663185119629,
|
9415 |
+
"learning_rate": 4.466841035752428e-06,
|
9416 |
+
"loss": 6.0298,
|
9417 |
+
"step": 1344
|
9418 |
+
},
|
9419 |
+
{
|
9420 |
+
"epoch": 0.05496189444864434,
|
9421 |
+
"grad_norm": 19.58012580871582,
|
9422 |
+
"learning_rate": 4.42503840176196e-06,
|
9423 |
+
"loss": 6.4416,
|
9424 |
+
"step": 1345
|
9425 |
+
},
|
9426 |
+
{
|
9427 |
+
"epoch": 0.055002758310687946,
|
9428 |
+
"grad_norm": 15.58175277709961,
|
9429 |
+
"learning_rate": 4.383423234013029e-06,
|
9430 |
+
"loss": 5.7028,
|
9431 |
+
"step": 1346
|
9432 |
+
},
|
9433 |
+
{
|
9434 |
+
"epoch": 0.055043622172731545,
|
9435 |
+
"grad_norm": 23.334970474243164,
|
9436 |
+
"learning_rate": 4.341995703683799e-06,
|
9437 |
+
"loss": 6.2679,
|
9438 |
+
"step": 1347
|
9439 |
+
},
|
9440 |
+
{
|
9441 |
+
"epoch": 0.05508448603477515,
|
9442 |
+
"grad_norm": 19.322080612182617,
|
9443 |
+
"learning_rate": 4.300755981180648e-06,
|
9444 |
+
"loss": 6.1203,
|
9445 |
+
"step": 1348
|
9446 |
+
},
|
9447 |
+
{
|
9448 |
+
"epoch": 0.05512534989681875,
|
9449 |
+
"grad_norm": 19.294343948364258,
|
9450 |
+
"learning_rate": 4.259704236137402e-06,
|
9451 |
+
"loss": 6.1612,
|
9452 |
+
"step": 1349
|
9453 |
+
},
|
9454 |
+
{
|
9455 |
+
"epoch": 0.05516621375886235,
|
9456 |
+
"grad_norm": 23.609317779541016,
|
9457 |
+
"learning_rate": 4.218840637414695e-06,
|
9458 |
+
"loss": 6.4344,
|
9459 |
+
"step": 1350
|
9460 |
+
},
|
9461 |
+
{
|
9462 |
+
"epoch": 0.05520707762090595,
|
9463 |
+
"grad_norm": 10.905501365661621,
|
9464 |
+
"learning_rate": 4.178165353099233e-06,
|
9465 |
+
"loss": 5.7916,
|
9466 |
+
"step": 1351
|
9467 |
+
},
|
9468 |
+
{
|
9469 |
+
"epoch": 0.05524794148294955,
|
9470 |
+
"grad_norm": 10.461952209472656,
|
9471 |
+
"learning_rate": 4.13767855050311e-06,
|
9472 |
+
"loss": 5.6574,
|
9473 |
+
"step": 1352
|
9474 |
+
},
|
9475 |
+
{
|
9476 |
+
"epoch": 0.055288805344993155,
|
9477 |
+
"grad_norm": 10.941486358642578,
|
9478 |
+
"learning_rate": 4.097380396163136e-06,
|
9479 |
+
"loss": 5.9305,
|
9480 |
+
"step": 1353
|
9481 |
+
},
|
9482 |
+
{
|
9483 |
+
"epoch": 0.055329669207036754,
|
9484 |
+
"grad_norm": 9.929145812988281,
|
9485 |
+
"learning_rate": 4.057271055840151e-06,
|
9486 |
+
"loss": 4.7839,
|
9487 |
+
"step": 1354
|
9488 |
+
},
|
9489 |
+
{
|
9490 |
+
"epoch": 0.05537053306908036,
|
9491 |
+
"grad_norm": 10.920966148376465,
|
9492 |
+
"learning_rate": 4.017350694518329e-06,
|
9493 |
+
"loss": 5.8171,
|
9494 |
+
"step": 1355
|
9495 |
+
},
|
9496 |
+
{
|
9497 |
+
"epoch": 0.05541139693112396,
|
9498 |
+
"grad_norm": 21.842693328857422,
|
9499 |
+
"learning_rate": 3.977619476404499e-06,
|
9500 |
+
"loss": 5.3902,
|
9501 |
+
"step": 1356
|
9502 |
+
},
|
9503 |
+
{
|
9504 |
+
"epoch": 0.05545226079316756,
|
9505 |
+
"grad_norm": 13.601142883300781,
|
9506 |
+
"learning_rate": 3.938077564927467e-06,
|
9507 |
+
"loss": 6.2075,
|
9508 |
+
"step": 1357
|
9509 |
+
},
|
9510 |
+
{
|
9511 |
+
"epoch": 0.05549312465521116,
|
9512 |
+
"grad_norm": 11.834310531616211,
|
9513 |
+
"learning_rate": 3.8987251227373734e-06,
|
9514 |
+
"loss": 6.2559,
|
9515 |
+
"step": 1358
|
9516 |
+
},
|
9517 |
+
{
|
9518 |
+
"epoch": 0.055533988517254766,
|
9519 |
+
"grad_norm": 10.8425931930542,
|
9520 |
+
"learning_rate": 3.859562311705011e-06,
|
9521 |
+
"loss": 4.917,
|
9522 |
+
"step": 1359
|
9523 |
+
},
|
9524 |
+
{
|
9525 |
+
"epoch": 0.055574852379298365,
|
9526 |
+
"grad_norm": 14.948829650878906,
|
9527 |
+
"learning_rate": 3.820589292921117e-06,
|
9528 |
+
"loss": 6.796,
|
9529 |
+
"step": 1360
|
9530 |
+
},
|
9531 |
+
{
|
9532 |
+
"epoch": 0.05561571624134197,
|
9533 |
+
"grad_norm": 19.911386489868164,
|
9534 |
+
"learning_rate": 3.7818062266957722e-06,
|
9535 |
+
"loss": 5.654,
|
9536 |
+
"step": 1361
|
9537 |
+
},
|
9538 |
+
{
|
9539 |
+
"epoch": 0.05565658010338557,
|
9540 |
+
"grad_norm": 10.68065071105957,
|
9541 |
+
"learning_rate": 3.7432132725576885e-06,
|
9542 |
+
"loss": 5.7488,
|
9543 |
+
"step": 1362
|
9544 |
+
},
|
9545 |
+
{
|
9546 |
+
"epoch": 0.05569744396542917,
|
9547 |
+
"grad_norm": 12.978265762329102,
|
9548 |
+
"learning_rate": 3.704810589253621e-06,
|
9549 |
+
"loss": 5.9216,
|
9550 |
+
"step": 1363
|
9551 |
+
},
|
9552 |
+
{
|
9553 |
+
"epoch": 0.05573830782747277,
|
9554 |
+
"grad_norm": 14.055144309997559,
|
9555 |
+
"learning_rate": 3.6665983347476174e-06,
|
9556 |
+
"loss": 6.0299,
|
9557 |
+
"step": 1364
|
9558 |
+
},
|
9559 |
+
{
|
9560 |
+
"epoch": 0.05577917168951638,
|
9561 |
+
"grad_norm": 13.637020111083984,
|
9562 |
+
"learning_rate": 3.6285766662204735e-06,
|
9563 |
+
"loss": 5.782,
|
9564 |
+
"step": 1365
|
9565 |
+
},
|
9566 |
+
{
|
9567 |
+
"epoch": 0.055820035551559975,
|
9568 |
+
"grad_norm": 15.15635871887207,
|
9569 |
+
"learning_rate": 3.590745740068996e-06,
|
9570 |
+
"loss": 6.4302,
|
9571 |
+
"step": 1366
|
9572 |
+
},
|
9573 |
+
{
|
9574 |
+
"epoch": 0.05586089941360358,
|
9575 |
+
"grad_norm": 11.472108840942383,
|
9576 |
+
"learning_rate": 3.5531057119054414e-06,
|
9577 |
+
"loss": 5.7443,
|
9578 |
+
"step": 1367
|
9579 |
+
},
|
9580 |
+
{
|
9581 |
+
"epoch": 0.05590176327564718,
|
9582 |
+
"grad_norm": 11.374180793762207,
|
9583 |
+
"learning_rate": 3.515656736556794e-06,
|
9584 |
+
"loss": 5.204,
|
9585 |
+
"step": 1368
|
9586 |
+
},
|
9587 |
+
{
|
9588 |
+
"epoch": 0.055942627137690784,
|
9589 |
+
"grad_norm": 11.667403221130371,
|
9590 |
+
"learning_rate": 3.478398968064178e-06,
|
9591 |
+
"loss": 5.574,
|
9592 |
+
"step": 1369
|
9593 |
+
},
|
9594 |
+
{
|
9595 |
+
"epoch": 0.05598349099973438,
|
9596 |
+
"grad_norm": 12.843528747558594,
|
9597 |
+
"learning_rate": 3.4413325596822423e-06,
|
9598 |
+
"loss": 5.8785,
|
9599 |
+
"step": 1370
|
9600 |
+
},
|
9601 |
+
{
|
9602 |
+
"epoch": 0.05602435486177799,
|
9603 |
+
"grad_norm": 11.200161933898926,
|
9604 |
+
"learning_rate": 3.4044576638784685e-06,
|
9605 |
+
"loss": 5.003,
|
9606 |
+
"step": 1371
|
9607 |
+
},
|
9608 |
+
{
|
9609 |
+
"epoch": 0.056065218723821586,
|
9610 |
+
"grad_norm": 14.955963134765625,
|
9611 |
+
"learning_rate": 3.3677744323325987e-06,
|
9612 |
+
"loss": 6.0432,
|
9613 |
+
"step": 1372
|
9614 |
+
},
|
9615 |
+
{
|
9616 |
+
"epoch": 0.05610608258586519,
|
9617 |
+
"grad_norm": 11.538397789001465,
|
9618 |
+
"learning_rate": 3.3312830159359876e-06,
|
9619 |
+
"loss": 5.5782,
|
9620 |
+
"step": 1373
|
9621 |
+
},
|
9622 |
+
{
|
9623 |
+
"epoch": 0.05614694644790879,
|
9624 |
+
"grad_norm": 13.244929313659668,
|
9625 |
+
"learning_rate": 3.2949835647909656e-06,
|
9626 |
+
"loss": 6.2672,
|
9627 |
+
"step": 1374
|
9628 |
+
},
|
9629 |
+
{
|
9630 |
+
"epoch": 0.056187810309952395,
|
9631 |
+
"grad_norm": 11.738024711608887,
|
9632 |
+
"learning_rate": 3.258876228210267e-06,
|
9633 |
+
"loss": 5.8058,
|
9634 |
+
"step": 1375
|
9635 |
+
},
|
9636 |
+
{
|
9637 |
+
"epoch": 0.05622867417199599,
|
9638 |
+
"grad_norm": 13.42725944519043,
|
9639 |
+
"learning_rate": 3.222961154716392e-06,
|
9640 |
+
"loss": 5.8394,
|
9641 |
+
"step": 1376
|
9642 |
+
},
|
9643 |
+
{
|
9644 |
+
"epoch": 0.0562695380340396,
|
9645 |
+
"grad_norm": 12.933088302612305,
|
9646 |
+
"learning_rate": 3.1872384920409613e-06,
|
9647 |
+
"loss": 6.2117,
|
9648 |
+
"step": 1377
|
9649 |
+
},
|
9650 |
+
{
|
9651 |
+
"epoch": 0.0563104018960832,
|
9652 |
+
"grad_norm": 10.286880493164062,
|
9653 |
+
"learning_rate": 3.151708387124175e-06,
|
9654 |
+
"loss": 4.6895,
|
9655 |
+
"step": 1378
|
9656 |
+
},
|
9657 |
+
{
|
9658 |
+
"epoch": 0.0563512657581268,
|
9659 |
+
"grad_norm": 10.749935150146484,
|
9660 |
+
"learning_rate": 3.1163709861141478e-06,
|
9661 |
+
"loss": 4.7426,
|
9662 |
+
"step": 1379
|
9663 |
+
},
|
9664 |
+
{
|
9665 |
+
"epoch": 0.0563921296201704,
|
9666 |
+
"grad_norm": 12.68270492553711,
|
9667 |
+
"learning_rate": 3.0812264343663466e-06,
|
9668 |
+
"loss": 5.1007,
|
9669 |
+
"step": 1380
|
9670 |
+
},
|
9671 |
+
{
|
9672 |
+
"epoch": 0.056432993482214006,
|
9673 |
+
"grad_norm": 13.24690055847168,
|
9674 |
+
"learning_rate": 3.046274876442989e-06,
|
9675 |
+
"loss": 6.1502,
|
9676 |
+
"step": 1381
|
9677 |
+
},
|
9678 |
+
{
|
9679 |
+
"epoch": 0.056473857344257604,
|
9680 |
+
"grad_norm": 14.038854598999023,
|
9681 |
+
"learning_rate": 3.0115164561124176e-06,
|
9682 |
+
"loss": 6.2595,
|
9683 |
+
"step": 1382
|
9684 |
+
},
|
9685 |
+
{
|
9686 |
+
"epoch": 0.05651472120630121,
|
9687 |
+
"grad_norm": 13.148086547851562,
|
9688 |
+
"learning_rate": 2.976951316348542e-06,
|
9689 |
+
"loss": 5.4964,
|
9690 |
+
"step": 1383
|
9691 |
+
},
|
9692 |
+
{
|
9693 |
+
"epoch": 0.05655558506834481,
|
9694 |
+
"grad_norm": 13.133016586303711,
|
9695 |
+
"learning_rate": 2.9425795993302474e-06,
|
9696 |
+
"loss": 5.1961,
|
9697 |
+
"step": 1384
|
9698 |
+
},
|
9699 |
+
{
|
9700 |
+
"epoch": 0.05659644893038841,
|
9701 |
+
"grad_norm": 14.196730613708496,
|
9702 |
+
"learning_rate": 2.908401446440784e-06,
|
9703 |
+
"loss": 6.0362,
|
9704 |
+
"step": 1385
|
9705 |
+
},
|
9706 |
+
{
|
9707 |
+
"epoch": 0.05663731279243201,
|
9708 |
+
"grad_norm": 15.088958740234375,
|
9709 |
+
"learning_rate": 2.8744169982672207e-06,
|
9710 |
+
"loss": 6.3641,
|
9711 |
+
"step": 1386
|
9712 |
+
},
|
9713 |
+
{
|
9714 |
+
"epoch": 0.056678176654475616,
|
9715 |
+
"grad_norm": 14.354655265808105,
|
9716 |
+
"learning_rate": 2.840626394599827e-06,
|
9717 |
+
"loss": 6.3393,
|
9718 |
+
"step": 1387
|
9719 |
+
},
|
9720 |
+
{
|
9721 |
+
"epoch": 0.056719040516519215,
|
9722 |
+
"grad_norm": 14.473902702331543,
|
9723 |
+
"learning_rate": 2.8070297744315465e-06,
|
9724 |
+
"loss": 5.4706,
|
9725 |
+
"step": 1388
|
9726 |
+
},
|
9727 |
+
{
|
9728 |
+
"epoch": 0.05675990437856282,
|
9729 |
+
"grad_norm": 13.960811614990234,
|
9730 |
+
"learning_rate": 2.7736272759573856e-06,
|
9731 |
+
"loss": 5.1405,
|
9732 |
+
"step": 1389
|
9733 |
+
},
|
9734 |
+
{
|
9735 |
+
"epoch": 0.05680076824060642,
|
9736 |
+
"grad_norm": 21.69525909423828,
|
9737 |
+
"learning_rate": 2.7404190365738437e-06,
|
9738 |
+
"loss": 5.6208,
|
9739 |
+
"step": 1390
|
9740 |
+
},
|
9741 |
+
{
|
9742 |
+
"epoch": 0.056841632102650023,
|
9743 |
+
"grad_norm": 13.99712085723877,
|
9744 |
+
"learning_rate": 2.707405192878393e-06,
|
9745 |
+
"loss": 5.2746,
|
9746 |
+
"step": 1391
|
9747 |
+
},
|
9748 |
+
{
|
9749 |
+
"epoch": 0.05688249596469362,
|
9750 |
+
"grad_norm": 16.092470169067383,
|
9751 |
+
"learning_rate": 2.674585880668856e-06,
|
9752 |
+
"loss": 6.3549,
|
9753 |
+
"step": 1392
|
9754 |
+
},
|
9755 |
+
{
|
9756 |
+
"epoch": 0.05692335982673723,
|
9757 |
+
"grad_norm": 16.60957145690918,
|
9758 |
+
"learning_rate": 2.641961234942908e-06,
|
9759 |
+
"loss": 5.9754,
|
9760 |
+
"step": 1393
|
9761 |
+
},
|
9762 |
+
{
|
9763 |
+
"epoch": 0.056964223688780825,
|
9764 |
+
"grad_norm": 14.698297500610352,
|
9765 |
+
"learning_rate": 2.6095313898974628e-06,
|
9766 |
+
"loss": 5.1192,
|
9767 |
+
"step": 1394
|
9768 |
+
},
|
9769 |
+
{
|
9770 |
+
"epoch": 0.05700508755082443,
|
9771 |
+
"grad_norm": 15.271346092224121,
|
9772 |
+
"learning_rate": 2.5772964789281597e-06,
|
9773 |
+
"loss": 4.6252,
|
9774 |
+
"step": 1395
|
9775 |
+
},
|
9776 |
+
{
|
9777 |
+
"epoch": 0.05704595141286803,
|
9778 |
+
"grad_norm": 15.326761245727539,
|
9779 |
+
"learning_rate": 2.5452566346288064e-06,
|
9780 |
+
"loss": 4.9674,
|
9781 |
+
"step": 1396
|
9782 |
+
},
|
9783 |
+
{
|
9784 |
+
"epoch": 0.057086815274911634,
|
9785 |
+
"grad_norm": 17.644712448120117,
|
9786 |
+
"learning_rate": 2.5134119887908368e-06,
|
9787 |
+
"loss": 6.3781,
|
9788 |
+
"step": 1397
|
9789 |
+
},
|
9790 |
+
{
|
9791 |
+
"epoch": 0.05712767913695523,
|
9792 |
+
"grad_norm": 16.700098037719727,
|
9793 |
+
"learning_rate": 2.4817626724027487e-06,
|
9794 |
+
"loss": 5.039,
|
9795 |
+
"step": 1398
|
9796 |
+
},
|
9797 |
+
{
|
9798 |
+
"epoch": 0.05716854299899884,
|
9799 |
+
"grad_norm": 18.29340934753418,
|
9800 |
+
"learning_rate": 2.4503088156495823e-06,
|
9801 |
+
"loss": 6.381,
|
9802 |
+
"step": 1399
|
9803 |
+
},
|
9804 |
+
{
|
9805 |
+
"epoch": 0.057209406861042436,
|
9806 |
+
"grad_norm": 32.03140640258789,
|
9807 |
+
"learning_rate": 2.419050547912388e-06,
|
9808 |
+
"loss": 7.1019,
|
9809 |
+
"step": 1400
|
9810 |
+
},
|
9811 |
+
{
|
9812 |
+
"epoch": 0.05725027072308604,
|
9813 |
+
"grad_norm": 14.21849536895752,
|
9814 |
+
"learning_rate": 2.3879879977676933e-06,
|
9815 |
+
"loss": 7.1429,
|
9816 |
+
"step": 1401
|
9817 |
+
},
|
9818 |
+
{
|
9819 |
+
"epoch": 0.05729113458512964,
|
9820 |
+
"grad_norm": 12.14288330078125,
|
9821 |
+
"learning_rate": 2.357121292986958e-06,
|
9822 |
+
"loss": 6.596,
|
9823 |
+
"step": 1402
|
9824 |
+
},
|
9825 |
+
{
|
9826 |
+
"epoch": 0.057331998447173245,
|
9827 |
+
"grad_norm": 10.332315444946289,
|
9828 |
+
"learning_rate": 2.3264505605360587e-06,
|
9829 |
+
"loss": 5.9791,
|
9830 |
+
"step": 1403
|
9831 |
+
},
|
9832 |
+
{
|
9833 |
+
"epoch": 0.05737286230921684,
|
9834 |
+
"grad_norm": 11.936444282531738,
|
9835 |
+
"learning_rate": 2.295975926574767e-06,
|
9836 |
+
"loss": 5.2107,
|
9837 |
+
"step": 1404
|
9838 |
+
},
|
9839 |
+
{
|
9840 |
+
"epoch": 0.05741372617126045,
|
9841 |
+
"grad_norm": 11.68570613861084,
|
9842 |
+
"learning_rate": 2.26569751645625e-06,
|
9843 |
+
"loss": 6.0945,
|
9844 |
+
"step": 1405
|
9845 |
+
},
|
9846 |
+
{
|
9847 |
+
"epoch": 0.05745459003330405,
|
9848 |
+
"grad_norm": 12.234179496765137,
|
9849 |
+
"learning_rate": 2.235615454726514e-06,
|
9850 |
+
"loss": 6.4366,
|
9851 |
+
"step": 1406
|
9852 |
+
},
|
9853 |
+
{
|
9854 |
+
"epoch": 0.05749545389534765,
|
9855 |
+
"grad_norm": 13.551424026489258,
|
9856 |
+
"learning_rate": 2.205729865123912e-06,
|
9857 |
+
"loss": 5.8915,
|
9858 |
+
"step": 1407
|
9859 |
+
},
|
9860 |
+
{
|
9861 |
+
"epoch": 0.05753631775739125,
|
9862 |
+
"grad_norm": 11.42490005493164,
|
9863 |
+
"learning_rate": 2.176040870578655e-06,
|
9864 |
+
"loss": 5.4649,
|
9865 |
+
"step": 1408
|
9866 |
+
},
|
9867 |
+
{
|
9868 |
+
"epoch": 0.057577181619434856,
|
9869 |
+
"grad_norm": 12.296146392822266,
|
9870 |
+
"learning_rate": 2.146548593212261e-06,
|
9871 |
+
"loss": 5.2908,
|
9872 |
+
"step": 1409
|
9873 |
+
},
|
9874 |
+
{
|
9875 |
+
"epoch": 0.057618045481478454,
|
9876 |
+
"grad_norm": 13.628933906555176,
|
9877 |
+
"learning_rate": 2.1172531543371176e-06,
|
9878 |
+
"loss": 5.6548,
|
9879 |
+
"step": 1410
|
9880 |
+
},
|
9881 |
+
{
|
9882 |
+
"epoch": 0.05765890934352206,
|
9883 |
+
"grad_norm": 11.3690185546875,
|
9884 |
+
"learning_rate": 2.088154674455911e-06,
|
9885 |
+
"loss": 5.422,
|
9886 |
+
"step": 1411
|
9887 |
+
},
|
9888 |
+
{
|
9889 |
+
"epoch": 0.05769977320556566,
|
9890 |
+
"grad_norm": 10.398890495300293,
|
9891 |
+
"learning_rate": 2.059253273261169e-06,
|
9892 |
+
"loss": 5.1192,
|
9893 |
+
"step": 1412
|
9894 |
+
},
|
9895 |
+
{
|
9896 |
+
"epoch": 0.05774063706760926,
|
9897 |
+
"grad_norm": 11.483975410461426,
|
9898 |
+
"learning_rate": 2.0305490696347795e-06,
|
9899 |
+
"loss": 4.9472,
|
9900 |
+
"step": 1413
|
9901 |
+
},
|
9902 |
+
{
|
9903 |
+
"epoch": 0.05778150092965286,
|
9904 |
+
"grad_norm": 11.797661781311035,
|
9905 |
+
"learning_rate": 2.002042181647479e-06,
|
9906 |
+
"loss": 5.8422,
|
9907 |
+
"step": 1414
|
9908 |
+
},
|
9909 |
+
{
|
9910 |
+
"epoch": 0.057822364791696466,
|
9911 |
+
"grad_norm": 12.492131233215332,
|
9912 |
+
"learning_rate": 1.973732726558364e-06,
|
9913 |
+
"loss": 7.2811,
|
9914 |
+
"step": 1415
|
9915 |
+
},
|
9916 |
+
{
|
9917 |
+
"epoch": 0.057863228653740065,
|
9918 |
+
"grad_norm": 13.242003440856934,
|
9919 |
+
"learning_rate": 1.9456208208144313e-06,
|
9920 |
+
"loss": 5.1037,
|
9921 |
+
"step": 1416
|
9922 |
+
},
|
9923 |
+
{
|
9924 |
+
"epoch": 0.05790409251578367,
|
9925 |
+
"grad_norm": 14.247307777404785,
|
9926 |
+
"learning_rate": 1.917706580050066e-06,
|
9927 |
+
"loss": 6.2367,
|
9928 |
+
"step": 1417
|
9929 |
+
},
|
9930 |
+
{
|
9931 |
+
"epoch": 0.05794495637782727,
|
9932 |
+
"grad_norm": 11.60015869140625,
|
9933 |
+
"learning_rate": 1.889990119086621e-06,
|
9934 |
+
"loss": 5.4071,
|
9935 |
+
"step": 1418
|
9936 |
+
},
|
9937 |
+
{
|
9938 |
+
"epoch": 0.057985820239870874,
|
9939 |
+
"grad_norm": 11.598762512207031,
|
9940 |
+
"learning_rate": 1.862471551931877e-06,
|
9941 |
+
"loss": 5.3924,
|
9942 |
+
"step": 1419
|
9943 |
+
},
|
9944 |
+
{
|
9945 |
+
"epoch": 0.05802668410191447,
|
9946 |
+
"grad_norm": 12.020844459533691,
|
9947 |
+
"learning_rate": 1.8351509917796216e-06,
|
9948 |
+
"loss": 5.1825,
|
9949 |
+
"step": 1420
|
9950 |
+
},
|
9951 |
+
{
|
9952 |
+
"epoch": 0.05806754796395807,
|
9953 |
+
"grad_norm": 12.868925094604492,
|
9954 |
+
"learning_rate": 1.8080285510091554e-06,
|
9955 |
+
"loss": 6.1717,
|
9956 |
+
"step": 1421
|
9957 |
+
},
|
9958 |
+
{
|
9959 |
+
"epoch": 0.058108411826001675,
|
9960 |
+
"grad_norm": 14.97560977935791,
|
9961 |
+
"learning_rate": 1.7811043411848693e-06,
|
9962 |
+
"loss": 5.4369,
|
9963 |
+
"step": 1422
|
9964 |
+
},
|
9965 |
+
{
|
9966 |
+
"epoch": 0.058149275688045274,
|
9967 |
+
"grad_norm": 12.455634117126465,
|
9968 |
+
"learning_rate": 1.7543784730557234e-06,
|
9969 |
+
"loss": 6.155,
|
9970 |
+
"step": 1423
|
9971 |
+
},
|
9972 |
+
{
|
9973 |
+
"epoch": 0.05819013955008888,
|
9974 |
+
"grad_norm": 15.083786010742188,
|
9975 |
+
"learning_rate": 1.7278510565548577e-06,
|
9976 |
+
"loss": 6.4782,
|
9977 |
+
"step": 1424
|
9978 |
+
},
|
9979 |
+
{
|
9980 |
+
"epoch": 0.05823100341213248,
|
9981 |
+
"grad_norm": 12.602248191833496,
|
9982 |
+
"learning_rate": 1.7015222007990883e-06,
|
9983 |
+
"loss": 5.2377,
|
9984 |
+
"step": 1425
|
9985 |
+
},
|
9986 |
+
{
|
9987 |
+
"epoch": 0.05827186727417608,
|
9988 |
+
"grad_norm": 12.938545227050781,
|
9989 |
+
"learning_rate": 1.675392014088495e-06,
|
9990 |
+
"loss": 5.9429,
|
9991 |
+
"step": 1426
|
9992 |
+
},
|
9993 |
+
{
|
9994 |
+
"epoch": 0.05831273113621968,
|
9995 |
+
"grad_norm": 16.22939109802246,
|
9996 |
+
"learning_rate": 1.6494606039059502e-06,
|
9997 |
+
"loss": 6.4306,
|
9998 |
+
"step": 1427
|
9999 |
+
},
|
10000 |
+
{
|
10001 |
+
"epoch": 0.058353594998263286,
|
10002 |
+
"grad_norm": 13.610986709594727,
|
10003 |
+
"learning_rate": 1.6237280769166807e-06,
|
10004 |
+
"loss": 5.8904,
|
10005 |
+
"step": 1428
|
10006 |
+
},
|
10007 |
+
{
|
10008 |
+
"epoch": 0.058394458860306885,
|
10009 |
+
"grad_norm": 13.104776382446289,
|
10010 |
+
"learning_rate": 1.5981945389678566e-06,
|
10011 |
+
"loss": 5.5021,
|
10012 |
+
"step": 1429
|
10013 |
+
},
|
10014 |
+
{
|
10015 |
+
"epoch": 0.05843532272235049,
|
10016 |
+
"grad_norm": 12.524452209472656,
|
10017 |
+
"learning_rate": 1.572860095088108e-06,
|
10018 |
+
"loss": 5.4969,
|
10019 |
+
"step": 1430
|
10020 |
+
},
|
10021 |
+
{
|
10022 |
+
"epoch": 0.05847618658439409,
|
10023 |
+
"grad_norm": 16.706439971923828,
|
10024 |
+
"learning_rate": 1.547724849487142e-06,
|
10025 |
+
"loss": 6.3696,
|
10026 |
+
"step": 1431
|
10027 |
+
},
|
10028 |
+
{
|
10029 |
+
"epoch": 0.05851705044643769,
|
10030 |
+
"grad_norm": 12.304777145385742,
|
10031 |
+
"learning_rate": 1.5227889055552836e-06,
|
10032 |
+
"loss": 5.3329,
|
10033 |
+
"step": 1432
|
10034 |
+
},
|
10035 |
+
{
|
10036 |
+
"epoch": 0.05855791430848129,
|
10037 |
+
"grad_norm": 14.964431762695312,
|
10038 |
+
"learning_rate": 1.4980523658630453e-06,
|
10039 |
+
"loss": 7.206,
|
10040 |
+
"step": 1433
|
10041 |
+
},
|
10042 |
+
{
|
10043 |
+
"epoch": 0.0585987781705249,
|
10044 |
+
"grad_norm": 13.057554244995117,
|
10045 |
+
"learning_rate": 1.473515332160741e-06,
|
10046 |
+
"loss": 6.3246,
|
10047 |
+
"step": 1434
|
10048 |
+
},
|
10049 |
+
{
|
10050 |
+
"epoch": 0.058639642032568495,
|
10051 |
+
"grad_norm": 16.151348114013672,
|
10052 |
+
"learning_rate": 1.4491779053780296e-06,
|
10053 |
+
"loss": 6.6896,
|
10054 |
+
"step": 1435
|
10055 |
+
},
|
10056 |
+
{
|
10057 |
+
"epoch": 0.0586805058946121,
|
10058 |
+
"grad_norm": 13.605801582336426,
|
10059 |
+
"learning_rate": 1.4250401856235218e-06,
|
10060 |
+
"loss": 5.5107,
|
10061 |
+
"step": 1436
|
10062 |
+
},
|
10063 |
+
{
|
10064 |
+
"epoch": 0.0587213697566557,
|
10065 |
+
"grad_norm": 15.856985092163086,
|
10066 |
+
"learning_rate": 1.4011022721843514e-06,
|
10067 |
+
"loss": 6.3843,
|
10068 |
+
"step": 1437
|
10069 |
+
},
|
10070 |
+
{
|
10071 |
+
"epoch": 0.058762233618699304,
|
10072 |
+
"grad_norm": 12.339056968688965,
|
10073 |
+
"learning_rate": 1.3773642635257821e-06,
|
10074 |
+
"loss": 5.4914,
|
10075 |
+
"step": 1438
|
10076 |
+
},
|
10077 |
+
{
|
10078 |
+
"epoch": 0.0588030974807429,
|
10079 |
+
"grad_norm": 17.9697265625,
|
10080 |
+
"learning_rate": 1.3538262572908022e-06,
|
10081 |
+
"loss": 5.8273,
|
10082 |
+
"step": 1439
|
10083 |
+
},
|
10084 |
+
{
|
10085 |
+
"epoch": 0.05884396134278651,
|
10086 |
+
"grad_norm": 14.805066108703613,
|
10087 |
+
"learning_rate": 1.3304883502997134e-06,
|
10088 |
+
"loss": 5.6195,
|
10089 |
+
"step": 1440
|
10090 |
+
},
|
10091 |
+
{
|
10092 |
+
"epoch": 0.058884825204830106,
|
10093 |
+
"grad_norm": 12.795866012573242,
|
10094 |
+
"learning_rate": 1.3073506385497424e-06,
|
10095 |
+
"loss": 4.8128,
|
10096 |
+
"step": 1441
|
10097 |
+
},
|
10098 |
+
{
|
10099 |
+
"epoch": 0.05892568906687371,
|
10100 |
+
"grad_norm": 16.51844596862793,
|
10101 |
+
"learning_rate": 1.2844132172146307e-06,
|
10102 |
+
"loss": 5.0841,
|
10103 |
+
"step": 1442
|
10104 |
+
},
|
10105 |
+
{
|
10106 |
+
"epoch": 0.05896655292891731,
|
10107 |
+
"grad_norm": 16.40656280517578,
|
10108 |
+
"learning_rate": 1.2616761806442557e-06,
|
10109 |
+
"loss": 7.3637,
|
10110 |
+
"step": 1443
|
10111 |
+
},
|
10112 |
+
{
|
10113 |
+
"epoch": 0.059007416790960915,
|
10114 |
+
"grad_norm": 13.929911613464355,
|
10115 |
+
"learning_rate": 1.2391396223642492e-06,
|
10116 |
+
"loss": 4.7891,
|
10117 |
+
"step": 1444
|
10118 |
+
},
|
10119 |
+
{
|
10120 |
+
"epoch": 0.05904828065300451,
|
10121 |
+
"grad_norm": 15.564924240112305,
|
10122 |
+
"learning_rate": 1.2168036350755974e-06,
|
10123 |
+
"loss": 6.9448,
|
10124 |
+
"step": 1445
|
10125 |
+
},
|
10126 |
+
{
|
10127 |
+
"epoch": 0.05908914451504812,
|
10128 |
+
"grad_norm": 16.948013305664062,
|
10129 |
+
"learning_rate": 1.1946683106542622e-06,
|
10130 |
+
"loss": 6.4067,
|
10131 |
+
"step": 1446
|
10132 |
+
},
|
10133 |
+
{
|
10134 |
+
"epoch": 0.05913000837709172,
|
10135 |
+
"grad_norm": 16.79241180419922,
|
10136 |
+
"learning_rate": 1.1727337401508053e-06,
|
10137 |
+
"loss": 5.9192,
|
10138 |
+
"step": 1447
|
10139 |
+
},
|
10140 |
+
{
|
10141 |
+
"epoch": 0.05917087223913532,
|
10142 |
+
"grad_norm": 16.887653350830078,
|
10143 |
+
"learning_rate": 1.1510000137900268e-06,
|
10144 |
+
"loss": 5.3616,
|
10145 |
+
"step": 1448
|
10146 |
+
},
|
10147 |
+
{
|
10148 |
+
"epoch": 0.05921173610117892,
|
10149 |
+
"grad_norm": 20.964611053466797,
|
10150 |
+
"learning_rate": 1.1294672209705814e-06,
|
10151 |
+
"loss": 6.4104,
|
10152 |
+
"step": 1449
|
10153 |
+
},
|
10154 |
+
{
|
10155 |
+
"epoch": 0.059252599963222526,
|
10156 |
+
"grad_norm": 24.013517379760742,
|
10157 |
+
"learning_rate": 1.1081354502645914e-06,
|
10158 |
+
"loss": 6.2491,
|
10159 |
+
"step": 1450
|
10160 |
+
},
|
10161 |
+
{
|
10162 |
+
"epoch": 0.059293463825266124,
|
10163 |
+
"grad_norm": 9.17637825012207,
|
10164 |
+
"learning_rate": 1.087004789417323e-06,
|
10165 |
+
"loss": 5.5373,
|
10166 |
+
"step": 1451
|
10167 |
+
},
|
10168 |
+
{
|
10169 |
+
"epoch": 0.05933432768730973,
|
10170 |
+
"grad_norm": 13.024526596069336,
|
10171 |
+
"learning_rate": 1.0660753253468102e-06,
|
10172 |
+
"loss": 6.5182,
|
10173 |
+
"step": 1452
|
10174 |
+
},
|
10175 |
+
{
|
10176 |
+
"epoch": 0.05937519154935333,
|
10177 |
+
"grad_norm": 10.952601432800293,
|
10178 |
+
"learning_rate": 1.0453471441434659e-06,
|
10179 |
+
"loss": 5.3951,
|
10180 |
+
"step": 1453
|
10181 |
+
},
|
10182 |
+
{
|
10183 |
+
"epoch": 0.05941605541139693,
|
10184 |
+
"grad_norm": 12.251294136047363,
|
10185 |
+
"learning_rate": 1.0248203310697812e-06,
|
10186 |
+
"loss": 6.6093,
|
10187 |
+
"step": 1454
|
10188 |
+
},
|
10189 |
+
{
|
10190 |
+
"epoch": 0.05945691927344053,
|
10191 |
+
"grad_norm": 11.657454490661621,
|
10192 |
+
"learning_rate": 1.0044949705599216e-06,
|
10193 |
+
"loss": 5.694,
|
10194 |
+
"step": 1455
|
10195 |
+
},
|
10196 |
+
{
|
10197 |
+
"epoch": 0.059497783135484136,
|
10198 |
+
"grad_norm": 13.22636604309082,
|
10199 |
+
"learning_rate": 9.843711462194372e-07,
|
10200 |
+
"loss": 5.6781,
|
10201 |
+
"step": 1456
|
10202 |
+
},
|
10203 |
+
{
|
10204 |
+
"epoch": 0.059538646997527735,
|
10205 |
+
"grad_norm": 10.926990509033203,
|
10206 |
+
"learning_rate": 9.644489408248637e-07,
|
10207 |
+
"loss": 5.3213,
|
10208 |
+
"step": 1457
|
10209 |
+
},
|
10210 |
+
{
|
10211 |
+
"epoch": 0.05957951085957134,
|
10212 |
+
"grad_norm": 13.176186561584473,
|
10213 |
+
"learning_rate": 9.447284363234055e-07,
|
10214 |
+
"loss": 6.2745,
|
10215 |
+
"step": 1458
|
10216 |
+
},
|
10217 |
+
{
|
10218 |
+
"epoch": 0.05962037472161494,
|
10219 |
+
"grad_norm": 12.561846733093262,
|
10220 |
+
"learning_rate": 9.252097138326087e-07,
|
10221 |
+
"loss": 6.2479,
|
10222 |
+
"step": 1459
|
10223 |
+
},
|
10224 |
+
{
|
10225 |
+
"epoch": 0.059661238583658543,
|
10226 |
+
"grad_norm": 12.38630199432373,
|
10227 |
+
"learning_rate": 9.058928536400058e-07,
|
10228 |
+
"loss": 5.2524,
|
10229 |
+
"step": 1460
|
10230 |
+
},
|
10231 |
+
{
|
10232 |
+
"epoch": 0.05970210244570214,
|
10233 |
+
"grad_norm": 11.330286979675293,
|
10234 |
+
"learning_rate": 8.867779352028205e-07,
|
10235 |
+
"loss": 5.8127,
|
10236 |
+
"step": 1461
|
10237 |
+
},
|
10238 |
+
{
|
10239 |
+
"epoch": 0.05974296630774575,
|
10240 |
+
"grad_norm": 11.38916301727295,
|
10241 |
+
"learning_rate": 8.678650371475916e-07,
|
10242 |
+
"loss": 5.6307,
|
10243 |
+
"step": 1462
|
10244 |
+
},
|
10245 |
+
{
|
10246 |
+
"epoch": 0.059783830169789345,
|
10247 |
+
"grad_norm": 12.12108325958252,
|
10248 |
+
"learning_rate": 8.491542372698835e-07,
|
10249 |
+
"loss": 6.1368,
|
10250 |
+
"step": 1463
|
10251 |
+
},
|
10252 |
+
{
|
10253 |
+
"epoch": 0.05982469403183295,
|
10254 |
+
"grad_norm": 11.852420806884766,
|
10255 |
+
"learning_rate": 8.30645612533959e-07,
|
10256 |
+
"loss": 5.4171,
|
10257 |
+
"step": 1464
|
10258 |
+
},
|
10259 |
+
{
|
10260 |
+
"epoch": 0.05986555789387655,
|
10261 |
+
"grad_norm": 11.031457901000977,
|
10262 |
+
"learning_rate": 8.123392390724682e-07,
|
10263 |
+
"loss": 4.8049,
|
10264 |
+
"step": 1465
|
10265 |
+
},
|
10266 |
+
{
|
10267 |
+
"epoch": 0.059906421755920154,
|
10268 |
+
"grad_norm": 9.570330619812012,
|
10269 |
+
"learning_rate": 7.942351921861102e-07,
|
10270 |
+
"loss": 4.3052,
|
10271 |
+
"step": 1466
|
10272 |
+
},
|
10273 |
+
{
|
10274 |
+
"epoch": 0.05994728561796375,
|
10275 |
+
"grad_norm": 12.23093032836914,
|
10276 |
+
"learning_rate": 7.763335463433718e-07,
|
10277 |
+
"loss": 5.7314,
|
10278 |
+
"step": 1467
|
10279 |
+
},
|
10280 |
+
{
|
10281 |
+
"epoch": 0.05998814948000736,
|
10282 |
+
"grad_norm": 12.750445365905762,
|
10283 |
+
"learning_rate": 7.586343751801506e-07,
|
10284 |
+
"loss": 5.6883,
|
10285 |
+
"step": 1468
|
10286 |
+
},
|
10287 |
+
{
|
10288 |
+
"epoch": 0.060029013342050956,
|
10289 |
+
"grad_norm": 15.570301055908203,
|
10290 |
+
"learning_rate": 7.411377514995432e-07,
|
10291 |
+
"loss": 5.0826,
|
10292 |
+
"step": 1469
|
10293 |
+
},
|
10294 |
+
{
|
10295 |
+
"epoch": 0.06006987720409456,
|
10296 |
+
"grad_norm": 11.989043235778809,
|
10297 |
+
"learning_rate": 7.238437472714466e-07,
|
10298 |
+
"loss": 5.7435,
|
10299 |
+
"step": 1470
|
10300 |
+
},
|
10301 |
+
{
|
10302 |
+
"epoch": 0.06011074106613816,
|
10303 |
+
"grad_norm": 13.558979034423828,
|
10304 |
+
"learning_rate": 7.067524336323406e-07,
|
10305 |
+
"loss": 5.4195,
|
10306 |
+
"step": 1471
|
10307 |
+
},
|
10308 |
+
{
|
10309 |
+
"epoch": 0.060151604928181765,
|
10310 |
+
"grad_norm": 12.950581550598145,
|
10311 |
+
"learning_rate": 6.898638808849556e-07,
|
10312 |
+
"loss": 6.1003,
|
10313 |
+
"step": 1472
|
10314 |
+
},
|
10315 |
+
{
|
10316 |
+
"epoch": 0.06019246879022536,
|
10317 |
+
"grad_norm": 13.448431968688965,
|
10318 |
+
"learning_rate": 6.731781584980001e-07,
|
10319 |
+
"loss": 6.4803,
|
10320 |
+
"step": 1473
|
10321 |
+
},
|
10322 |
+
{
|
10323 |
+
"epoch": 0.06023333265226897,
|
10324 |
+
"grad_norm": 11.66065788269043,
|
10325 |
+
"learning_rate": 6.566953351058669e-07,
|
10326 |
+
"loss": 5.1155,
|
10327 |
+
"step": 1474
|
10328 |
+
},
|
10329 |
+
{
|
10330 |
+
"epoch": 0.06027419651431257,
|
10331 |
+
"grad_norm": 14.802644729614258,
|
10332 |
+
"learning_rate": 6.404154785083383e-07,
|
10333 |
+
"loss": 6.7994,
|
10334 |
+
"step": 1475
|
10335 |
+
},
|
10336 |
+
{
|
10337 |
+
"epoch": 0.06031506037635617,
|
10338 |
+
"grad_norm": 18.10548973083496,
|
10339 |
+
"learning_rate": 6.243386556703312e-07,
|
10340 |
+
"loss": 5.5827,
|
10341 |
+
"step": 1476
|
10342 |
+
},
|
10343 |
+
{
|
10344 |
+
"epoch": 0.06035592423839977,
|
10345 |
+
"grad_norm": 11.930153846740723,
|
10346 |
+
"learning_rate": 6.084649327216141e-07,
|
10347 |
+
"loss": 5.8416,
|
10348 |
+
"step": 1477
|
10349 |
+
},
|
10350 |
+
{
|
10351 |
+
"epoch": 0.060396788100443376,
|
10352 |
+
"grad_norm": 12.74058723449707,
|
10353 |
+
"learning_rate": 5.927943749565401e-07,
|
10354 |
+
"loss": 5.9192,
|
10355 |
+
"step": 1478
|
10356 |
+
},
|
10357 |
+
{
|
10358 |
+
"epoch": 0.060437651962486974,
|
10359 |
+
"grad_norm": 12.323589324951172,
|
10360 |
+
"learning_rate": 5.773270468337533e-07,
|
10361 |
+
"loss": 5.3801,
|
10362 |
+
"step": 1479
|
10363 |
+
},
|
10364 |
+
{
|
10365 |
+
"epoch": 0.06047851582453058,
|
10366 |
+
"grad_norm": 12.76671028137207,
|
10367 |
+
"learning_rate": 5.62063011975944e-07,
|
10368 |
+
"loss": 5.2271,
|
10369 |
+
"step": 1480
|
10370 |
+
},
|
10371 |
+
{
|
10372 |
+
"epoch": 0.06051937968657418,
|
10373 |
+
"grad_norm": 13.657951354980469,
|
10374 |
+
"learning_rate": 5.470023331695939e-07,
|
10375 |
+
"loss": 6.1542,
|
10376 |
+
"step": 1481
|
10377 |
+
},
|
10378 |
+
{
|
10379 |
+
"epoch": 0.06056024354861778,
|
10380 |
+
"grad_norm": 12.739529609680176,
|
10381 |
+
"learning_rate": 5.321450723646982e-07,
|
10382 |
+
"loss": 5.4657,
|
10383 |
+
"step": 1482
|
10384 |
+
},
|
10385 |
+
{
|
10386 |
+
"epoch": 0.06060110741066138,
|
10387 |
+
"grad_norm": 14.063922882080078,
|
10388 |
+
"learning_rate": 5.174912906745321e-07,
|
10389 |
+
"loss": 6.0558,
|
10390 |
+
"step": 1483
|
10391 |
+
},
|
10392 |
+
{
|
10393 |
+
"epoch": 0.060641971272704986,
|
10394 |
+
"grad_norm": 12.476627349853516,
|
10395 |
+
"learning_rate": 5.0304104837538e-07,
|
10396 |
+
"loss": 6.0374,
|
10397 |
+
"step": 1484
|
10398 |
+
},
|
10399 |
+
{
|
10400 |
+
"epoch": 0.060682835134748585,
|
10401 |
+
"grad_norm": 16.759687423706055,
|
10402 |
+
"learning_rate": 4.887944049062843e-07,
|
10403 |
+
"loss": 8.0454,
|
10404 |
+
"step": 1485
|
10405 |
+
},
|
10406 |
+
{
|
10407 |
+
"epoch": 0.06072369899679219,
|
10408 |
+
"grad_norm": 13.86631965637207,
|
10409 |
+
"learning_rate": 4.747514188688351e-07,
|
10410 |
+
"loss": 5.8759,
|
10411 |
+
"step": 1486
|
10412 |
+
},
|
10413 |
+
{
|
10414 |
+
"epoch": 0.06076456285883579,
|
10415 |
+
"grad_norm": 12.774024963378906,
|
10416 |
+
"learning_rate": 4.6091214802689295e-07,
|
10417 |
+
"loss": 4.5233,
|
10418 |
+
"step": 1487
|
10419 |
+
},
|
10420 |
+
{
|
10421 |
+
"epoch": 0.060805426720879394,
|
10422 |
+
"grad_norm": 15.54319953918457,
|
10423 |
+
"learning_rate": 4.4727664930636624e-07,
|
10424 |
+
"loss": 5.6707,
|
10425 |
+
"step": 1488
|
10426 |
+
},
|
10427 |
+
{
|
10428 |
+
"epoch": 0.06084629058292299,
|
10429 |
+
"grad_norm": 13.094404220581055,
|
10430 |
+
"learning_rate": 4.3384497879497274e-07,
|
10431 |
+
"loss": 5.5513,
|
10432 |
+
"step": 1489
|
10433 |
+
},
|
10434 |
+
{
|
10435 |
+
"epoch": 0.0608871544449666,
|
10436 |
+
"grad_norm": 13.967459678649902,
|
10437 |
+
"learning_rate": 4.206171917420121e-07,
|
10438 |
+
"loss": 5.4522,
|
10439 |
+
"step": 1490
|
10440 |
+
},
|
10441 |
+
{
|
10442 |
+
"epoch": 0.060928018307010195,
|
10443 |
+
"grad_norm": 15.86069107055664,
|
10444 |
+
"learning_rate": 4.075933425581491e-07,
|
10445 |
+
"loss": 5.9506,
|
10446 |
+
"step": 1491
|
10447 |
+
},
|
10448 |
+
{
|
10449 |
+
"epoch": 0.060968882169053794,
|
10450 |
+
"grad_norm": 18.132307052612305,
|
10451 |
+
"learning_rate": 3.9477348481515853e-07,
|
10452 |
+
"loss": 7.6329,
|
10453 |
+
"step": 1492
|
10454 |
+
},
|
10455 |
+
{
|
10456 |
+
"epoch": 0.0610097460310974,
|
10457 |
+
"grad_norm": 17.080135345458984,
|
10458 |
+
"learning_rate": 3.8215767124573643e-07,
|
10459 |
+
"loss": 6.0841,
|
10460 |
+
"step": 1493
|
10461 |
+
},
|
10462 |
+
{
|
10463 |
+
"epoch": 0.061050609893141,
|
10464 |
+
"grad_norm": 19.462413787841797,
|
10465 |
+
"learning_rate": 3.6974595374328345e-07,
|
10466 |
+
"loss": 6.2101,
|
10467 |
+
"step": 1494
|
10468 |
+
},
|
10469 |
+
{
|
10470 |
+
"epoch": 0.0610914737551846,
|
10471 |
+
"grad_norm": 11.74515438079834,
|
10472 |
+
"learning_rate": 3.575383833616497e-07,
|
10473 |
+
"loss": 4.7436,
|
10474 |
+
"step": 1495
|
10475 |
+
},
|
10476 |
+
{
|
10477 |
+
"epoch": 0.0611323376172282,
|
10478 |
+
"grad_norm": 16.021833419799805,
|
10479 |
+
"learning_rate": 3.4553501031497906e-07,
|
10480 |
+
"loss": 6.0138,
|
10481 |
+
"step": 1496
|
10482 |
+
},
|
10483 |
+
{
|
10484 |
+
"epoch": 0.061173201479271806,
|
10485 |
+
"grad_norm": 22.23221778869629,
|
10486 |
+
"learning_rate": 3.337358839774707e-07,
|
10487 |
+
"loss": 7.1393,
|
10488 |
+
"step": 1497
|
10489 |
+
},
|
10490 |
+
{
|
10491 |
+
"epoch": 0.061214065341315405,
|
10492 |
+
"grad_norm": 18.788055419921875,
|
10493 |
+
"learning_rate": 3.2214105288318474e-07,
|
10494 |
+
"loss": 7.0255,
|
10495 |
+
"step": 1498
|
10496 |
+
},
|
10497 |
+
{
|
10498 |
+
"epoch": 0.06125492920335901,
|
10499 |
+
"grad_norm": 18.50741195678711,
|
10500 |
+
"learning_rate": 3.1075056472583686e-07,
|
10501 |
+
"loss": 5.0141,
|
10502 |
+
"step": 1499
|
10503 |
+
},
|
10504 |
+
{
|
10505 |
+
"epoch": 0.06129579306540261,
|
10506 |
+
"grad_norm": 25.534204483032227,
|
10507 |
+
"learning_rate": 2.9956446635861503e-07,
|
10508 |
+
"loss": 6.6527,
|
10509 |
+
"step": 1500
|
10510 |
+
},
|
10511 |
+
{
|
10512 |
+
"epoch": 0.06133665692744621,
|
10513 |
+
"grad_norm": 10.7308988571167,
|
10514 |
+
"learning_rate": 2.8858280379396306e-07,
|
10515 |
+
"loss": 5.4428,
|
10516 |
+
"step": 1501
|
10517 |
+
},
|
10518 |
+
{
|
10519 |
+
"epoch": 0.06137752078948981,
|
10520 |
+
"grad_norm": 10.015771865844727,
|
10521 |
+
"learning_rate": 2.778056222034253e-07,
|
10522 |
+
"loss": 5.2409,
|
10523 |
+
"step": 1502
|
10524 |
+
},
|
10525 |
+
{
|
10526 |
+
"epoch": 0.06141838465153342,
|
10527 |
+
"grad_norm": 11.2932710647583,
|
10528 |
+
"learning_rate": 2.6723296591742996e-07,
|
10529 |
+
"loss": 5.673,
|
10530 |
+
"step": 1503
|
10531 |
+
},
|
10532 |
+
{
|
10533 |
+
"epoch": 0.061459248513577015,
|
10534 |
+
"grad_norm": 9.384864807128906,
|
10535 |
+
"learning_rate": 2.5686487842512816e-07,
|
10536 |
+
"loss": 4.8163,
|
10537 |
+
"step": 1504
|
10538 |
+
},
|
10539 |
+
{
|
10540 |
+
"epoch": 0.06150011237562062,
|
10541 |
+
"grad_norm": 11.913911819458008,
|
10542 |
+
"learning_rate": 2.467014023741943e-07,
|
10543 |
+
"loss": 5.8597,
|
10544 |
+
"step": 1505
|
10545 |
+
},
|
10546 |
+
{
|
10547 |
+
"epoch": 0.06154097623766422,
|
10548 |
+
"grad_norm": 10.451993942260742,
|
10549 |
+
"learning_rate": 2.3674257957067013e-07,
|
10550 |
+
"loss": 5.3339,
|
10551 |
+
"step": 1506
|
10552 |
+
},
|
10553 |
+
{
|
10554 |
+
"epoch": 0.061581840099707824,
|
10555 |
+
"grad_norm": 10.381134986877441,
|
10556 |
+
"learning_rate": 2.269884509787823e-07,
|
10557 |
+
"loss": 4.9778,
|
10558 |
+
"step": 1507
|
10559 |
+
},
|
10560 |
+
{
|
10561 |
+
"epoch": 0.06162270396175142,
|
10562 |
+
"grad_norm": 12.811723709106445,
|
10563 |
+
"learning_rate": 2.1743905672077513e-07,
|
10564 |
+
"loss": 7.1338,
|
10565 |
+
"step": 1508
|
10566 |
+
},
|
10567 |
+
{
|
10568 |
+
"epoch": 0.06166356782379503,
|
10569 |
+
"grad_norm": 10.494141578674316,
|
10570 |
+
"learning_rate": 2.0809443607675006e-07,
|
10571 |
+
"loss": 5.1256,
|
10572 |
+
"step": 1509
|
10573 |
+
},
|
10574 |
+
{
|
10575 |
+
"epoch": 0.061704431685838626,
|
10576 |
+
"grad_norm": 11.155356407165527,
|
10577 |
+
"learning_rate": 1.9895462748450443e-07,
|
10578 |
+
"loss": 5.896,
|
10579 |
+
"step": 1510
|
10580 |
+
},
|
10581 |
+
{
|
10582 |
+
"epoch": 0.06174529554788223,
|
10583 |
+
"grad_norm": 11.229161262512207,
|
10584 |
+
"learning_rate": 1.9001966853935404e-07,
|
10585 |
+
"loss": 4.8591,
|
10586 |
+
"step": 1511
|
10587 |
+
},
|
10588 |
+
{
|
10589 |
+
"epoch": 0.06178615940992583,
|
10590 |
+
"grad_norm": 11.449837684631348,
|
10591 |
+
"learning_rate": 1.812895959940164e-07,
|
10592 |
+
"loss": 6.071,
|
10593 |
+
"step": 1512
|
10594 |
+
},
|
10595 |
+
{
|
10596 |
+
"epoch": 0.061827023271969435,
|
10597 |
+
"grad_norm": 10.819708824157715,
|
10598 |
+
"learning_rate": 1.727644457584221e-07,
|
10599 |
+
"loss": 5.2497,
|
10600 |
+
"step": 1513
|
10601 |
+
},
|
10602 |
+
{
|
10603 |
+
"epoch": 0.06186788713401303,
|
10604 |
+
"grad_norm": 12.558652877807617,
|
10605 |
+
"learning_rate": 1.6444425289958155e-07,
|
10606 |
+
"loss": 5.994,
|
10607 |
+
"step": 1514
|
10608 |
+
},
|
10609 |
+
{
|
10610 |
+
"epoch": 0.06190875099605664,
|
10611 |
+
"grad_norm": 12.91252326965332,
|
10612 |
+
"learning_rate": 1.5632905164145172e-07,
|
10613 |
+
"loss": 5.4362,
|
10614 |
+
"step": 1515
|
10615 |
+
},
|
10616 |
+
{
|
10617 |
+
"epoch": 0.06194961485810024,
|
10618 |
+
"grad_norm": 13.53883171081543,
|
10619 |
+
"learning_rate": 1.4841887536478082e-07,
|
10620 |
+
"loss": 5.7484,
|
10621 |
+
"step": 1516
|
10622 |
+
},
|
10623 |
+
{
|
10624 |
+
"epoch": 0.06199047872014384,
|
10625 |
+
"grad_norm": 11.971946716308594,
|
10626 |
+
"learning_rate": 1.407137566069694e-07,
|
10627 |
+
"loss": 5.6425,
|
10628 |
+
"step": 1517
|
10629 |
+
},
|
10630 |
+
{
|
10631 |
+
"epoch": 0.06203134258218744,
|
10632 |
+
"grad_norm": 14.9813814163208,
|
10633 |
+
"learning_rate": 1.332137270619538e-07,
|
10634 |
+
"loss": 6.353,
|
10635 |
+
"step": 1518
|
10636 |
+
},
|
10637 |
+
{
|
10638 |
+
"epoch": 0.062072206444231046,
|
10639 |
+
"grad_norm": 10.741629600524902,
|
10640 |
+
"learning_rate": 1.2591881758005076e-07,
|
10641 |
+
"loss": 5.6977,
|
10642 |
+
"step": 1519
|
10643 |
+
},
|
10644 |
+
{
|
10645 |
+
"epoch": 0.062113070306274644,
|
10646 |
+
"grad_norm": 15.596894264221191,
|
10647 |
+
"learning_rate": 1.188290581678575e-07,
|
10648 |
+
"loss": 6.2054,
|
10649 |
+
"step": 1520
|
10650 |
+
},
|
10651 |
+
{
|
10652 |
+
"epoch": 0.06215393416831825,
|
10653 |
+
"grad_norm": 12.907197952270508,
|
10654 |
+
"learning_rate": 1.1194447798811291e-07,
|
10655 |
+
"loss": 7.102,
|
10656 |
+
"step": 1521
|
10657 |
+
},
|
10658 |
+
{
|
10659 |
+
"epoch": 0.06219479803036185,
|
10660 |
+
"grad_norm": 11.445554733276367,
|
10661 |
+
"learning_rate": 1.052651053595699e-07,
|
10662 |
+
"loss": 5.667,
|
10663 |
+
"step": 1522
|
10664 |
+
},
|
10665 |
+
{
|
10666 |
+
"epoch": 0.06223566189240545,
|
10667 |
+
"grad_norm": 11.226521492004395,
|
10668 |
+
"learning_rate": 9.879096775689545e-08,
|
10669 |
+
"loss": 4.7355,
|
10670 |
+
"step": 1523
|
10671 |
+
},
|
10672 |
+
{
|
10673 |
+
"epoch": 0.06227652575444905,
|
10674 |
+
"grad_norm": 12.492834091186523,
|
10675 |
+
"learning_rate": 9.25220918105485e-08,
|
10676 |
+
"loss": 5.8962,
|
10677 |
+
"step": 1524
|
10678 |
+
},
|
10679 |
+
{
|
10680 |
+
"epoch": 0.062317389616492656,
|
10681 |
+
"grad_norm": 14.047611236572266,
|
10682 |
+
"learning_rate": 8.64585033066856e-08,
|
10683 |
+
"loss": 6.5156,
|
10684 |
+
"step": 1525
|
10685 |
+
},
|
10686 |
+
{
|
10687 |
+
"epoch": 0.062358253478536255,
|
10688 |
+
"grad_norm": 12.739624977111816,
|
10689 |
+
"learning_rate": 8.060022718702209e-08,
|
10690 |
+
"loss": 6.2031,
|
10691 |
+
"step": 1526
|
10692 |
+
},
|
10693 |
+
{
|
10694 |
+
"epoch": 0.06239911734057986,
|
10695 |
+
"grad_norm": 12.041427612304688,
|
10696 |
+
"learning_rate": 7.494728754876002e-08,
|
10697 |
+
"loss": 5.4776,
|
10698 |
+
"step": 1527
|
10699 |
+
},
|
10700 |
+
{
|
10701 |
+
"epoch": 0.06243998120262346,
|
10702 |
+
"grad_norm": 15.399492263793945,
|
10703 |
+
"learning_rate": 6.949970764448254e-08,
|
10704 |
+
"loss": 6.6556,
|
10705 |
+
"step": 1528
|
10706 |
+
},
|
10707 |
+
{
|
10708 |
+
"epoch": 0.062480845064667064,
|
10709 |
+
"grad_norm": 12.621404647827148,
|
10710 |
+
"learning_rate": 6.425750988204304e-08,
|
10711 |
+
"loss": 5.2749,
|
10712 |
+
"step": 1529
|
10713 |
+
},
|
10714 |
+
{
|
10715 |
+
"epoch": 0.06252170892671066,
|
10716 |
+
"grad_norm": 14.159551620483398,
|
10717 |
+
"learning_rate": 5.922071582449285e-08,
|
10718 |
+
"loss": 5.4779,
|
10719 |
+
"step": 1530
|
10720 |
+
},
|
10721 |
+
{
|
10722 |
+
"epoch": 0.06256257278875427,
|
10723 |
+
"grad_norm": 12.370981216430664,
|
10724 |
+
"learning_rate": 5.438934618998137e-08,
|
10725 |
+
"loss": 5.6499,
|
10726 |
+
"step": 1531
|
10727 |
+
},
|
10728 |
+
{
|
10729 |
+
"epoch": 0.06260343665079787,
|
10730 |
+
"grad_norm": 15.384753227233887,
|
10731 |
+
"learning_rate": 4.976342085167285e-08,
|
10732 |
+
"loss": 6.5493,
|
10733 |
+
"step": 1532
|
10734 |
+
},
|
10735 |
+
{
|
10736 |
+
"epoch": 0.06264430051284146,
|
10737 |
+
"grad_norm": 13.284728050231934,
|
10738 |
+
"learning_rate": 4.534295883766859e-08,
|
10739 |
+
"loss": 6.45,
|
10740 |
+
"step": 1533
|
10741 |
+
},
|
10742 |
+
{
|
10743 |
+
"epoch": 0.06268516437488507,
|
10744 |
+
"grad_norm": 16.89252281188965,
|
10745 |
+
"learning_rate": 4.112797833091264e-08,
|
10746 |
+
"loss": 7.0135,
|
10747 |
+
"step": 1534
|
10748 |
+
},
|
10749 |
+
{
|
10750 |
+
"epoch": 0.06272602823692867,
|
10751 |
+
"grad_norm": 17.233299255371094,
|
10752 |
+
"learning_rate": 3.7118496669147354e-08,
|
10753 |
+
"loss": 6.9018,
|
10754 |
+
"step": 1535
|
10755 |
+
},
|
10756 |
+
{
|
10757 |
+
"epoch": 0.06276689209897228,
|
10758 |
+
"grad_norm": 15.176739692687988,
|
10759 |
+
"learning_rate": 3.3314530344807916e-08,
|
10760 |
+
"loss": 6.1082,
|
10761 |
+
"step": 1536
|
10762 |
+
},
|
10763 |
+
{
|
10764 |
+
"epoch": 0.06280775596101587,
|
10765 |
+
"grad_norm": 14.497002601623535,
|
10766 |
+
"learning_rate": 2.9716095004989064e-08,
|
10767 |
+
"loss": 6.0441,
|
10768 |
+
"step": 1537
|
10769 |
+
},
|
10770 |
+
{
|
10771 |
+
"epoch": 0.06284861982305948,
|
10772 |
+
"grad_norm": 16.184877395629883,
|
10773 |
+
"learning_rate": 2.6323205451345145e-08,
|
10774 |
+
"loss": 6.2948,
|
10775 |
+
"step": 1538
|
10776 |
+
},
|
10777 |
+
{
|
10778 |
+
"epoch": 0.06288948368510308,
|
10779 |
+
"grad_norm": 13.436396598815918,
|
10780 |
+
"learning_rate": 2.3135875640051264e-08,
|
10781 |
+
"loss": 5.5434,
|
10782 |
+
"step": 1539
|
10783 |
+
},
|
10784 |
+
{
|
10785 |
+
"epoch": 0.06293034754714669,
|
10786 |
+
"grad_norm": 13.344731330871582,
|
10787 |
+
"learning_rate": 2.015411868175332e-08,
|
10788 |
+
"loss": 4.7574,
|
10789 |
+
"step": 1540
|
10790 |
+
},
|
10791 |
+
{
|
10792 |
+
"epoch": 0.06297121140919028,
|
10793 |
+
"grad_norm": 14.811240196228027,
|
10794 |
+
"learning_rate": 1.737794684148475e-08,
|
10795 |
+
"loss": 5.6094,
|
10796 |
+
"step": 1541
|
10797 |
+
},
|
10798 |
+
{
|
10799 |
+
"epoch": 0.06301207527123388,
|
10800 |
+
"grad_norm": 18.071805953979492,
|
10801 |
+
"learning_rate": 1.480737153864431e-08,
|
10802 |
+
"loss": 6.1633,
|
10803 |
+
"step": 1542
|
10804 |
+
},
|
10805 |
+
{
|
10806 |
+
"epoch": 0.06305293913327749,
|
10807 |
+
"grad_norm": 17.336023330688477,
|
10808 |
+
"learning_rate": 1.2442403346929476e-08,
|
10809 |
+
"loss": 7.362,
|
10810 |
+
"step": 1543
|
10811 |
+
},
|
10812 |
+
{
|
10813 |
+
"epoch": 0.0630938029953211,
|
10814 |
+
"grad_norm": 20.29914665222168,
|
10815 |
+
"learning_rate": 1.0283051994308678e-08,
|
10816 |
+
"loss": 6.7163,
|
10817 |
+
"step": 1544
|
10818 |
+
},
|
10819 |
+
{
|
10820 |
+
"epoch": 0.06313466685736469,
|
10821 |
+
"grad_norm": 15.721060752868652,
|
10822 |
+
"learning_rate": 8.329326362976897e-09,
|
10823 |
+
"loss": 6.1168,
|
10824 |
+
"step": 1545
|
10825 |
+
},
|
10826 |
+
{
|
10827 |
+
"epoch": 0.06317553071940829,
|
10828 |
+
"grad_norm": 13.263158798217773,
|
10829 |
+
"learning_rate": 6.58123448931125e-09,
|
10830 |
+
"loss": 4.2258,
|
10831 |
+
"step": 1546
|
10832 |
+
},
|
10833 |
+
{
|
10834 |
+
"epoch": 0.0632163945814519,
|
10835 |
+
"grad_norm": 16.77396011352539,
|
10836 |
+
"learning_rate": 5.03878356383769e-09,
|
10837 |
+
"loss": 6.6178,
|
10838 |
+
"step": 1547
|
10839 |
+
},
|
10840 |
+
{
|
10841 |
+
"epoch": 0.0632572584434955,
|
10842 |
+
"grad_norm": 16.796947479248047,
|
10843 |
+
"learning_rate": 3.70197993121435e-09,
|
10844 |
+
"loss": 6.8297,
|
10845 |
+
"step": 1548
|
10846 |
+
},
|
10847 |
+
{
|
10848 |
+
"epoch": 0.06329812230553909,
|
10849 |
+
"grad_norm": 55.745731353759766,
|
10850 |
+
"learning_rate": 2.5708290901982344e-09,
|
10851 |
+
"loss": 5.6831,
|
10852 |
+
"step": 1549
|
10853 |
+
},
|
10854 |
+
{
|
10855 |
+
"epoch": 0.0633389861675827,
|
10856 |
+
"grad_norm": 26.14885711669922,
|
10857 |
+
"learning_rate": 1.645335693623018e-09,
|
10858 |
+
"loss": 6.9226,
|
10859 |
+
"step": 1550
|
10860 |
+
},
|
10861 |
+
{
|
10862 |
+
"epoch": 0.0633798500296263,
|
10863 |
+
"grad_norm": 9.635321617126465,
|
10864 |
+
"learning_rate": 9.25503548371287e-10,
|
10865 |
+
"loss": 6.117,
|
10866 |
+
"step": 1551
|
10867 |
+
},
|
10868 |
+
{
|
10869 |
+
"epoch": 0.06342071389166991,
|
10870 |
+
"grad_norm": 10.165143013000488,
|
10871 |
+
"learning_rate": 4.113356153745418e-10,
|
10872 |
+
"loss": 5.3234,
|
10873 |
+
"step": 1552
|
10874 |
+
},
|
10875 |
+
{
|
10876 |
+
"epoch": 0.0634615777537135,
|
10877 |
+
"grad_norm": 10.81802749633789,
|
10878 |
+
"learning_rate": 1.0283400959099076e-10,
|
10879 |
+
"loss": 5.5497,
|
10880 |
+
"step": 1553
|
10881 |
+
},
|
10882 |
+
{
|
10883 |
+
"epoch": 0.0635024416157571,
|
10884 |
+
"grad_norm": 10.889331817626953,
|
10885 |
+
"learning_rate": 0.0,
|
10886 |
+
"loss": 5.0768,
|
10887 |
+
"step": 1554
|
10888 |
}
|
10889 |
],
|
10890 |
"logging_steps": 1,
|
|
|
10899 |
"should_evaluate": false,
|
10900 |
"should_log": false,
|
10901 |
"should_save": true,
|
10902 |
+
"should_training_stop": true
|
10903 |
},
|
10904 |
"attributes": {}
|
10905 |
}
|
10906 |
},
|
10907 |
+
"total_flos": 1.0899400942461911e+18,
|
10908 |
"train_batch_size": 4,
|
10909 |
"trial_name": null,
|
10910 |
"trial_params": null
|