"auto-commit"
- model-bin/finetune/base/{checkpoint-71429 → checkpoint-72051}/config.json +0 -0
- model-bin/finetune/base/{checkpoint-71429 → checkpoint-72051}/optimizer.pt +1 -1
- model-bin/finetune/base/{checkpoint-71429 → checkpoint-72051}/preprocessor_config.json +0 -0
- model-bin/finetune/base/{checkpoint-71429 → checkpoint-72051}/pytorch_model.bin +1 -1
- model-bin/finetune/base/{checkpoint-71429 → checkpoint-72051}/rng_state.pth +2 -2
- model-bin/finetune/base/{checkpoint-71429 → checkpoint-72051}/scaler.pt +1 -1
- model-bin/finetune/base/{checkpoint-71429 → checkpoint-72051}/scheduler.pt +1 -1
- model-bin/finetune/base/{checkpoint-71429 → checkpoint-72051}/trainer_state.json +799 -4
- model-bin/finetune/base/{checkpoint-71429 → checkpoint-72051}/training_args.bin +0 -0
- model-bin/finetune/base/log/1629862341.3600242/events.out.tfevents.1629862341.7e498afd5545.905.33 +3 -0
- model-bin/finetune/base/log/1629863001.1370952/events.out.tfevents.1629863001.7e498afd5545.905.35 +3 -0
- model-bin/finetune/base/log/1629863667.3291147/events.out.tfevents.1629863667.7e498afd5545.905.37 +3 -0
- model-bin/finetune/base/log/1629864323.1096034/events.out.tfevents.1629864323.7e498afd5545.905.39 +3 -0
- model-bin/finetune/base/log/1629864981.269791/events.out.tfevents.1629864981.7e498afd5545.905.41 +3 -0
- model-bin/finetune/base/log/events.out.tfevents.1629862341.7e498afd5545.905.32 +3 -0
- model-bin/finetune/base/log/events.out.tfevents.1629863001.7e498afd5545.905.34 +3 -0
- model-bin/finetune/base/log/events.out.tfevents.1629863667.7e498afd5545.905.36 +3 -0
- model-bin/finetune/base/log/events.out.tfevents.1629864323.7e498afd5545.905.38 +3 -0
- model-bin/finetune/base/log/events.out.tfevents.1629864981.7e498afd5545.905.40 +3 -0
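All of the binary files in this commit are tracked with Git LFS, so each diff below compares the three-line pointer file (version, oid, size) rather than the binary itself; overwriting a checkpoint therefore shows up as a one- or two-line oid/size change. A minimal sketch of parsing such a pointer, assuming plain "key value" lines (the hash below is the new optimizer.pt oid from this commit):

# Minimal sketch: parse a Git LFS pointer file ("key value" per line).
def parse_lfs_pointer(text):
    fields = {}
    for line in text.strip().splitlines():
        key, _, value = line.partition(" ")
        fields[key] = value
    return fields

pointer = (
    "version https://git-lfs.github.com/spec/v1\n"
    "oid sha256:8873d0dc9057248ddbae4c27a249e590cf59025feff462e35911152c444c962c\n"
    "size 722165393\n"
)
info = parse_lfs_pointer(pointer)
print(info["oid"], int(info["size"]))  # the ~722 MB optimizer state itself lives in LFS storage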
model-bin/finetune/base/{checkpoint-71429 → checkpoint-72051}/config.json
RENAMED
File without changes
model-bin/finetune/base/{checkpoint-71429 → checkpoint-72051}/optimizer.pt
RENAMED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:8873d0dc9057248ddbae4c27a249e590cf59025feff462e35911152c444c962c
 size 722165393
model-bin/finetune/base/{checkpoint-71429 → checkpoint-72051}/preprocessor_config.json
RENAMED
File without changes
model-bin/finetune/base/{checkpoint-71429 → checkpoint-72051}/pytorch_model.bin
RENAMED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:e52d5cf3e3f0427292831ad23e3d0a522818192c5f3913a56c37d85da5412d96
 size 377909911
model-bin/finetune/base/{checkpoint-71429 → checkpoint-72051}/rng_state.pth
RENAMED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:60f8efa654d8cc6381c5028dd2d25886ea57f54653ecbe454c2265104ddf3403
+size 14567
model-bin/finetune/base/{checkpoint-71429 → checkpoint-72051}/scaler.pt
RENAMED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:3415138cb73a5883c98f0bf2c8f14e8096a4e812f12f4501f88e16af49d22e3e
 size 559
model-bin/finetune/base/{checkpoint-71429 → checkpoint-72051}/scheduler.pt
RENAMED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:0e35f035aa5aee1ff28647ebe5126ffe9dcf299b0796aabe04f2764e48e8a871
 size 623
model-bin/finetune/base/{checkpoint-71429 → checkpoint-72051}/trainer_state.json
RENAMED
@@ -1,8 +1,8 @@
 {
 "best_metric": 0.18412114350410416,
 "best_model_checkpoint": "./model-bin/finetune/base/checkpoint-69565",
-"epoch": 575.
-"global_step":
+"epoch": 575.9960159362549,
+"global_step": 72051,
 "is_hyper_param_search": false,
 "is_local_process_zero": true,
 "is_world_process_zero": true,
@@ -190242,11 +190242,806 @@
 "eval_steps_per_second": 0.663,
 "eval_wer": 0.18538826318909307,
 "step": 71429
+},
+{
+"epoch": 571.01,
+"learning_rate": 8.87173076923077e-06,
+"loss": 0.4145,
+"step": 71430
+},
+{
+"epoch": 571.05,
+"learning_rate": 8.871650641025642e-06,
+"loss": 0.2937,
+"step": 71435
+},
+{
+"epoch": 571.09,
+"learning_rate": 8.871570512820514e-06,
+"loss": 0.301,
+"step": 71440
+},
+{
+"epoch": 571.13,
+"learning_rate": 8.871490384615385e-06,
+"loss": 0.352,
+"step": 71445
+},
+{
+"epoch": 571.17,
+"learning_rate": 8.871410256410256e-06,
+"loss": 0.6479,
+"step": 71450
+},
+{
+"epoch": 571.21,
+"learning_rate": 8.87133012820513e-06,
+"loss": 1.223,
+"step": 71455
+},
+{
+"epoch": 571.25,
+"learning_rate": 8.87125e-06,
+"loss": 0.3032,
+"step": 71460
+},
+{
+"epoch": 571.29,
+"learning_rate": 8.871169871794872e-06,
+"loss": 0.3232,
+"step": 71465
+},
+{
+"epoch": 571.33,
+"learning_rate": 8.871089743589745e-06,
+"loss": 0.3903,
+"step": 71470
+},
+{
+"epoch": 571.37,
+"learning_rate": 8.871009615384617e-06,
+"loss": 0.556,
+"step": 71475
+},
+{
+"epoch": 571.41,
+"learning_rate": 8.870929487179488e-06,
+"loss": 1.2527,
+"step": 71480
+},
+{
+"epoch": 571.45,
+"learning_rate": 8.87084935897436e-06,
+"loss": 0.3457,
+"step": 71485
+},
+{
+"epoch": 571.49,
+"learning_rate": 8.870769230769232e-06,
+"loss": 0.2611,
+"step": 71490
+},
+{
+"epoch": 571.53,
+"learning_rate": 8.870689102564102e-06,
+"loss": 0.3528,
+"step": 71495
+},
+{
+"epoch": 571.57,
+"learning_rate": 8.870608974358975e-06,
+"loss": 0.6128,
+"step": 71500
+},
+{
+"epoch": 571.61,
+"learning_rate": 8.870528846153846e-06,
+"loss": 1.23,
+"step": 71505
+},
+{
+"epoch": 571.65,
+"learning_rate": 8.87044871794872e-06,
+"loss": 0.2925,
+"step": 71510
+},
+{
+"epoch": 571.69,
+"learning_rate": 8.870368589743591e-06,
+"loss": 0.2735,
+"step": 71515
+},
+{
+"epoch": 571.73,
+"learning_rate": 8.870288461538462e-06,
+"loss": 0.3287,
+"step": 71520
+},
+{
+"epoch": 571.77,
+"learning_rate": 8.870208333333335e-06,
+"loss": 0.5729,
+"step": 71525
+},
+{
+"epoch": 571.81,
+"learning_rate": 8.870144230769233e-06,
+"loss": 1.1543,
+"step": 71530
+},
+{
+"epoch": 571.85,
+"learning_rate": 8.870064102564104e-06,
+"loss": 0.2948,
+"step": 71535
+},
+{
+"epoch": 571.89,
+"learning_rate": 8.869983974358975e-06,
+"loss": 0.3476,
+"step": 71540
+},
+{
+"epoch": 571.93,
+"learning_rate": 8.869903846153847e-06,
+"loss": 0.3919,
+"step": 71545
+},
+{
+"epoch": 571.97,
+"learning_rate": 8.86982371794872e-06,
+"loss": 0.5831,
+"step": 71550
+},
+{
+"epoch": 572.0,
+"eval_loss": 0.39764681458473206,
+"eval_runtime": 39.9891,
+"eval_samples_per_second": 21.006,
+"eval_steps_per_second": 0.675,
+"eval_wer": 0.18975681650700074,
+"step": 71554
+},
+{
+"epoch": 577.01,
+"learning_rate": 8.86974358974359e-06,
+"loss": 0.4127,
+"step": 71555
+},
+{
+"epoch": 577.05,
+"learning_rate": 8.869663461538463e-06,
+"loss": 0.3382,
+"step": 71560
+},
+{
+"epoch": 577.09,
+"learning_rate": 8.869583333333334e-06,
+"loss": 0.3803,
+"step": 71565
+},
+{
+"epoch": 577.13,
+"learning_rate": 8.869503205128205e-06,
+"loss": 0.3151,
+"step": 71570
+},
+{
+"epoch": 577.17,
+"learning_rate": 8.869423076923077e-06,
+"loss": 0.5314,
+"step": 71575
+},
+{
+"epoch": 577.21,
+"learning_rate": 8.86934294871795e-06,
+"loss": 1.2319,
+"step": 71580
+},
+{
+"epoch": 577.25,
+"learning_rate": 8.869262820512821e-06,
+"loss": 0.3355,
+"step": 71585
+},
+{
+"epoch": 577.29,
+"learning_rate": 8.869182692307692e-06,
+"loss": 0.268,
+"step": 71590
+},
+{
+"epoch": 577.33,
+"learning_rate": 8.869102564102565e-06,
+"loss": 0.3552,
+"step": 71595
+},
+{
+"epoch": 577.37,
+"learning_rate": 8.869022435897437e-06,
+"loss": 0.5807,
+"step": 71600
+},
+{
+"epoch": 577.41,
+"learning_rate": 8.868942307692308e-06,
+"loss": 1.1892,
+"step": 71605
+},
+{
+"epoch": 577.45,
+"learning_rate": 8.86886217948718e-06,
+"loss": 0.3401,
+"step": 71610
+},
+{
+"epoch": 577.49,
+"learning_rate": 8.868782051282053e-06,
+"loss": 0.2897,
+"step": 71615
+},
+{
+"epoch": 577.53,
+"learning_rate": 8.868701923076924e-06,
+"loss": 0.3935,
+"step": 71620
+},
+{
+"epoch": 577.57,
+"learning_rate": 8.868621794871795e-06,
+"loss": 0.5546,
+"step": 71625
+},
+{
+"epoch": 577.61,
+"learning_rate": 8.868541666666668e-06,
+"loss": 1.1703,
+"step": 71630
+},
+{
+"epoch": 577.65,
+"learning_rate": 8.86846153846154e-06,
+"loss": 0.3219,
+"step": 71635
+},
+{
+"epoch": 577.69,
+"learning_rate": 8.868381410256411e-06,
+"loss": 0.2871,
+"step": 71640
+},
+{
+"epoch": 577.73,
+"learning_rate": 8.868301282051282e-06,
+"loss": 0.4877,
+"step": 71645
+},
+{
+"epoch": 577.77,
+"learning_rate": 8.868221153846155e-06,
+"loss": 0.5671,
+"step": 71650
+},
+{
+"epoch": 577.81,
+"learning_rate": 8.868141025641027e-06,
+"loss": 1.1452,
+"step": 71655
+},
+{
+"epoch": 577.85,
+"learning_rate": 8.868060897435898e-06,
+"loss": 0.3415,
+"step": 71660
+},
+{
+"epoch": 577.89,
+"learning_rate": 8.86798076923077e-06,
+"loss": 0.2894,
+"step": 71665
+},
+{
+"epoch": 577.93,
+"learning_rate": 8.867900641025643e-06,
+"loss": 0.406,
+"step": 71670
+},
+{
+"epoch": 577.97,
+"learning_rate": 8.867820512820512e-06,
+"loss": 0.5532,
+"step": 71675
+},
+{
+"epoch": 578.0,
+"eval_loss": 0.4131166636943817,
+"eval_runtime": 39.7237,
+"eval_samples_per_second": 21.146,
+"eval_steps_per_second": 0.68,
+"eval_wer": 0.19386475923604454,
+"step": 71678
+},
+{
+"epoch": 578.02,
+"learning_rate": 8.867740384615385e-06,
+"loss": 0.3303,
+"step": 71680
+},
+{
+"epoch": 578.06,
+"learning_rate": 8.867660256410258e-06,
+"loss": 0.333,
+"step": 71685
+},
+{
+"epoch": 578.1,
+"learning_rate": 8.867580128205128e-06,
+"loss": 0.3693,
+"step": 71690
+},
+{
+"epoch": 578.14,
+"learning_rate": 8.867500000000001e-06,
+"loss": 0.3573,
+"step": 71695
+},
+{
+"epoch": 578.18,
+"learning_rate": 8.867419871794872e-06,
+"loss": 0.6273,
+"step": 71700
+},
+{
+"epoch": 578.22,
+"learning_rate": 8.867339743589744e-06,
+"loss": 1.0727,
+"step": 71705
+},
+{
+"epoch": 578.26,
+"learning_rate": 8.867259615384615e-06,
+"loss": 0.3103,
+"step": 71710
+},
+{
+"epoch": 578.3,
+"learning_rate": 8.867179487179488e-06,
+"loss": 0.3671,
+"step": 71715
+},
+{
+"epoch": 578.34,
+"learning_rate": 8.86709935897436e-06,
+"loss": 0.3691,
+"step": 71720
+},
+{
+"epoch": 578.38,
+"learning_rate": 8.867019230769231e-06,
+"loss": 0.6489,
+"step": 71725
+},
+{
+"epoch": 578.42,
+"learning_rate": 8.866939102564104e-06,
+"loss": 1.029,
+"step": 71730
+},
+{
+"epoch": 578.46,
+"learning_rate": 8.866858974358975e-06,
+"loss": 0.299,
+"step": 71735
+},
+{
+"epoch": 578.5,
+"learning_rate": 8.866778846153847e-06,
+"loss": 0.307,
+"step": 71740
+},
+{
+"epoch": 578.54,
+"learning_rate": 8.866698717948718e-06,
+"loss": 0.3934,
+"step": 71745
+},
+{
+"epoch": 578.58,
+"learning_rate": 8.866618589743591e-06,
+"loss": 0.6558,
+"step": 71750
+},
+{
+"epoch": 578.62,
+"learning_rate": 8.866538461538462e-06,
+"loss": 1.0735,
+"step": 71755
+},
+{
+"epoch": 578.66,
+"learning_rate": 8.866458333333334e-06,
+"loss": 0.3017,
+"step": 71760
+},
+{
+"epoch": 578.7,
+"learning_rate": 8.866378205128205e-06,
+"loss": 0.4043,
+"step": 71765
+},
+{
+"epoch": 578.74,
+"learning_rate": 8.866298076923078e-06,
+"loss": 0.3836,
+"step": 71770
+},
+{
+"epoch": 578.78,
+"learning_rate": 8.86621794871795e-06,
+"loss": 0.7219,
+"step": 71775
+},
+{
+"epoch": 578.82,
+"learning_rate": 8.866137820512821e-06,
+"loss": 1.1428,
+"step": 71780
+},
+{
+"epoch": 578.86,
+"learning_rate": 8.866057692307694e-06,
+"loss": 0.3019,
+"step": 71785
+},
+{
+"epoch": 578.9,
+"learning_rate": 8.865977564102565e-06,
+"loss": 0.3753,
+"step": 71790
+},
+{
+"epoch": 578.94,
+"learning_rate": 8.865897435897437e-06,
+"loss": 0.3985,
+"step": 71795
+},
+{
+"epoch": 578.98,
+"learning_rate": 8.865817307692308e-06,
+"loss": 0.6391,
+"step": 71800
+},
+{
+"epoch": 579.0,
+"eval_loss": 0.35643306374549866,
+"eval_runtime": 40.7091,
+"eval_samples_per_second": 20.634,
+"eval_steps_per_second": 0.663,
+"eval_wer": 0.18603480512009204,
+"step": 71802
+},
+{
+"epoch": 579.02,
+"learning_rate": 8.865737179487181e-06,
+"loss": 0.427,
+"step": 71805
+},
+{
+"epoch": 579.06,
+"learning_rate": 8.865657051282052e-06,
+"loss": 0.308,
+"step": 71810
+},
+{
+"epoch": 579.1,
+"learning_rate": 8.865576923076924e-06,
+"loss": 0.3701,
+"step": 71815
+},
+{
+"epoch": 579.15,
+"learning_rate": 8.865496794871795e-06,
+"loss": 0.4027,
+"step": 71820
+},
+{
+"epoch": 579.19,
+"learning_rate": 8.865416666666668e-06,
+"loss": 0.8272,
+"step": 71825
+},
+{
+"epoch": 579.23,
+"learning_rate": 8.865336538461538e-06,
+"loss": 0.9265,
+"step": 71830
+},
+{
+"epoch": 579.27,
+"learning_rate": 8.865256410256411e-06,
+"loss": 0.3088,
+"step": 71835
+},
+{
+"epoch": 579.31,
+"learning_rate": 8.865176282051284e-06,
+"loss": 0.3262,
+"step": 71840
+},
+{
+"epoch": 579.35,
+"learning_rate": 8.865096153846154e-06,
+"loss": 0.3958,
+"step": 71845
+},
+{
+"epoch": 579.39,
+"learning_rate": 8.865016025641027e-06,
+"loss": 0.8472,
+"step": 71850
+},
+{
+"epoch": 579.43,
+"learning_rate": 8.864935897435898e-06,
+"loss": 0.9926,
+"step": 71855
+},
+{
+"epoch": 579.47,
+"learning_rate": 8.86485576923077e-06,
+"loss": 0.31,
+"step": 71860
+},
+{
+"epoch": 579.51,
+"learning_rate": 8.86477564102564e-06,
+"loss": 0.4103,
+"step": 71865
+},
+{
+"epoch": 579.55,
+"learning_rate": 8.864695512820514e-06,
+"loss": 0.3979,
+"step": 71870
+},
+{
+"epoch": 579.59,
+"learning_rate": 8.864615384615385e-06,
+"loss": 1.1086,
+"step": 71875
+},
+{
+"epoch": 579.63,
+"learning_rate": 8.864535256410257e-06,
+"loss": 0.9069,
+"step": 71880
+},
+{
+"epoch": 579.67,
+"learning_rate": 8.86445512820513e-06,
+"loss": 0.3284,
+"step": 71885
+},
+{
+"epoch": 579.71,
+"learning_rate": 8.864375000000001e-06,
+"loss": 0.321,
+"step": 71890
+},
+{
+"epoch": 579.75,
+"learning_rate": 8.864294871794872e-06,
+"loss": 0.4419,
+"step": 71895
+},
+{
+"epoch": 579.79,
+"learning_rate": 8.864214743589744e-06,
+"loss": 0.842,
+"step": 71900
+},
+{
+"epoch": 579.83,
+"learning_rate": 8.864134615384617e-06,
+"loss": 0.8116,
+"step": 71905
+},
+{
+"epoch": 579.87,
+"learning_rate": 8.864054487179488e-06,
+"loss": 0.2963,
+"step": 71910
+},
+{
+"epoch": 579.91,
+"learning_rate": 8.86397435897436e-06,
+"loss": 0.331,
+"step": 71915
+},
+{
+"epoch": 579.95,
+"learning_rate": 8.86389423076923e-06,
+"loss": 0.4651,
+"step": 71920
+},
+{
+"epoch": 579.99,
+"learning_rate": 8.863814102564104e-06,
+"loss": 1.0331,
+"step": 71925
+},
+{
+"epoch": 580.0,
+"eval_loss": 0.3950594961643219,
+"eval_runtime": 40.0192,
+"eval_samples_per_second": 20.99,
+"eval_steps_per_second": 0.675,
+"eval_wer": 0.18807002059429243,
+"step": 71926
+},
+{
+"epoch": 575.03,
+"learning_rate": 8.863733974358975e-06,
+"loss": 0.3218,
+"step": 71930
+},
+{
+"epoch": 575.07,
+"learning_rate": 8.863653846153847e-06,
+"loss": 0.33,
+"step": 71935
+},
+{
+"epoch": 575.11,
+"learning_rate": 8.86357371794872e-06,
+"loss": 0.3206,
+"step": 71940
+},
+{
+"epoch": 575.15,
+"learning_rate": 8.863493589743591e-06,
+"loss": 0.5113,
+"step": 71945
+},
+{
+"epoch": 575.19,
+"learning_rate": 8.863413461538462e-06,
+"loss": 0.8986,
+"step": 71950
+},
+{
+"epoch": 575.23,
+"learning_rate": 8.863333333333334e-06,
+"loss": 0.6329,
+"step": 71955
+},
+{
+"epoch": 575.27,
+"learning_rate": 8.863253205128207e-06,
+"loss": 0.2978,
+"step": 71960
+},
+{
+"epoch": 575.31,
+"learning_rate": 8.863173076923076e-06,
+"loss": 0.3732,
+"step": 71965
+},
+{
+"epoch": 575.35,
+"learning_rate": 8.86309294871795e-06,
+"loss": 0.4307,
+"step": 71970
+},
+{
+"epoch": 575.39,
+"learning_rate": 8.863012820512821e-06,
+"loss": 0.9785,
+"step": 71975
+},
+{
+"epoch": 575.43,
+"learning_rate": 8.862932692307692e-06,
+"loss": 0.6819,
+"step": 71980
+},
+{
+"epoch": 575.47,
+"learning_rate": 8.862852564102565e-06,
+"loss": 0.3069,
+"step": 71985
+},
+{
+"epoch": 575.51,
+"learning_rate": 8.862772435897437e-06,
+"loss": 0.5598,
+"step": 71990
+},
+{
+"epoch": 575.55,
+"learning_rate": 8.862692307692308e-06,
+"loss": 0.4619,
+"step": 71995
+},
+{
+"epoch": 575.59,
+"learning_rate": 8.86261217948718e-06,
+"loss": 1.044,
+"step": 72000
+},
+{
+"epoch": 575.63,
+"learning_rate": 8.862532051282052e-06,
+"loss": 0.7529,
+"step": 72005
+},
+{
+"epoch": 575.67,
+"learning_rate": 8.862451923076924e-06,
+"loss": 0.2959,
+"step": 72010
+},
+{
+"epoch": 575.71,
+"learning_rate": 8.862371794871795e-06,
+"loss": 0.3042,
+"step": 72015
+},
+{
+"epoch": 575.75,
+"learning_rate": 8.862291666666666e-06,
+"loss": 0.4811,
+"step": 72020
+},
+{
+"epoch": 575.79,
+"learning_rate": 8.86221153846154e-06,
+"loss": 0.9146,
+"step": 72025
+},
+{
+"epoch": 575.83,
+"learning_rate": 8.862131410256411e-06,
+"loss": 0.6554,
+"step": 72030
+},
+{
+"epoch": 575.87,
+"learning_rate": 8.862051282051282e-06,
+"loss": 0.3012,
+"step": 72035
+},
+{
+"epoch": 575.91,
+"learning_rate": 8.861971153846155e-06,
+"loss": 0.3819,
+"step": 72040
+},
+{
+"epoch": 575.95,
+"learning_rate": 8.861891025641027e-06,
+"loss": 0.4464,
+"step": 72045
+},
+{
+"epoch": 575.99,
+"learning_rate": 8.861810897435898e-06,
+"loss": 1.0982,
+"step": 72050
+},
+{
+"epoch": 576.0,
+"eval_loss": 0.4038601219654083,
+"eval_runtime": 38.3503,
+"eval_samples_per_second": 21.903,
+"eval_steps_per_second": 0.704,
+"eval_wer": 0.19118408401745432,
+"step": 72051
 }
 ],
-"max_steps":
+"max_steps": 625000,
 "num_train_epochs": 5000,
-"total_flos": 2.
+"total_flos": 2.0276195965252e+20,
 "trial_name": null,
 "trial_params": null
 }
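The bulk of this commit is the block appended to trainer_state.json: one JSON object per logging step (epoch, learning_rate, loss, step) and one per end-of-epoch evaluation (eval_loss, eval_wer, and friends). A minimal sketch of pulling the eval curve back out of the renamed checkpoint, assuming the standard Hugging Face Trainer layout in which these objects live in a top-level "log_history" list:

import json

# Minimal sketch: read the eval history out of trainer_state.json.
# Assumes the standard Trainer layout with a top-level "log_history" list.
with open("model-bin/finetune/base/checkpoint-72051/trainer_state.json") as f:
    state = json.load(f)

evals = [e for e in state["log_history"] if "eval_wer" in e]
for e in evals[-3:]:
    print(e["step"], e["eval_loss"], e["eval_wer"])

print("best:", state["best_metric"], "from", state["best_model_checkpoint"])

Note that eval_wer at step 72051 (0.1912) is still above the best_metric of 0.1841 recorded at checkpoint-69565, which is presumably why best_model_checkpoint is unchanged in this commit.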
model-bin/finetune/base/{checkpoint-71429 → checkpoint-72051}/training_args.bin
RENAMED
File without changes
model-bin/finetune/base/log/1629862341.3600242/events.out.tfevents.1629862341.7e498afd5545.905.33
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:9345f3311d27ba5cbb577fcccf3fd2fd1301aafde1c96dd6538d1f8600e88b8a
+size 4194
model-bin/finetune/base/log/1629863001.1370952/events.out.tfevents.1629863001.7e498afd5545.905.35
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:75b9a457e87216a618f2fc9f110a11d4f9a7837c2cdbd87d2d77540b7e4fee23
+size 4194
model-bin/finetune/base/log/1629863667.3291147/events.out.tfevents.1629863667.7e498afd5545.905.37
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:616868fa5dbafcd6e5cad0f2e8045f1227610207091cbbee4ec0b2c1cbd19047
+size 4194
model-bin/finetune/base/log/1629864323.1096034/events.out.tfevents.1629864323.7e498afd5545.905.39
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:9324ea3bfef932dcb0116332bdad524e83616d7c8664b00f13cbe141fe7933fc
+size 4194
model-bin/finetune/base/log/1629864981.269791/events.out.tfevents.1629864981.7e498afd5545.905.41
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:aaadade527c749fa165d8f7c487a7630bb7a66971f3706134c995de881aa146b
+size 4194
model-bin/finetune/base/log/events.out.tfevents.1629862341.7e498afd5545.905.32
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:15175e18cd25c16091a0df604577790e9f06c0aca66ff48983b813cf9560a84f
+size 8622
model-bin/finetune/base/log/events.out.tfevents.1629863001.7e498afd5545.905.34
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:5937acd1685074bab57160dcb3c1e784e33e8692dd5d7d0bc054d945f67084f3
+size 8622
model-bin/finetune/base/log/events.out.tfevents.1629863667.7e498afd5545.905.36
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:4e4f17f7bab0ee3dea35ca9b597a9b5f2c0898d5bef7517a866d25b097458cb7
+size 8622
model-bin/finetune/base/log/events.out.tfevents.1629864323.7e498afd5545.905.38
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:6722987f95122e78819d706159160678fa62fb12d14d6492cbbfa9a30a314fc4
+size 8622
model-bin/finetune/base/log/events.out.tfevents.1629864981.7e498afd5545.905.40
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:4d3ee75109965dc69964e42043268b1b098412833890c58ab418dbd406b3c44b
+size 8622
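The events.out.tfevents.* files added under log/ are TensorBoard event logs (also stored through LFS, so only pointers appear above). A minimal sketch of reading scalars back out of them with the tensorboard package, assuming the real files have been pulled from LFS; the scalar tag below is a guess, since the diff shows only pointers:

from tensorboard.backend.event_processing.event_accumulator import EventAccumulator

# Minimal sketch: load the event files in the log directory and list the
# scalar tags they contain. Requires `pip install tensorboard` and the
# actual event files, not the LFS pointers.
acc = EventAccumulator("model-bin/finetune/base/log")
acc.Reload()

print(acc.Tags()["scalars"])             # discover which scalar series were logged
for event in acc.Scalars("train/loss"):  # hypothetical tag; pick one from Tags()
    print(event.step, event.value)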