Upload 6 files
- optimizer.pt +1 -1
- pytorch_model.bin +1 -1
- rng_state.pth +1 -1
- scheduler.pt +1 -1
- trainer_state.json +3 -162
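For reference, a commit like this is usually produced by pushing a local Trainer checkpoint folder to the Hub in one go. A minimal sketch using huggingface_hub, where the folder path and repo id are illustrative placeholders rather than values taken from this commit:

# Sketch: upload a local checkpoint folder as a single commit.
# folder_path and repo_id below are placeholders, not named in this commit.
from huggingface_hub import HfApi

api = HfApi()
api.upload_folder(
    folder_path="path/to/checkpoint-1580",
    repo_id="<user>/segformer-finetuned-segments-cmp-facade-outputs-transforms",
    commit_message="Upload 6 files",
)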
optimizer.pt
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:977f5dd02837def8a67bfdb3840d3749412b546ae910342c39b43901a84dab88
 size 29910213
pytorch_model.bin
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:e395710d290bbd2c359f799d1e9d825284da907c47db16a54432acd7ade49a99
 size 14943053
rng_state.pth
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:bb3139ceff3e549f3522c40a6c01d349328aa3dc5b5c6992d05725dbe7df6f41
 size 14575
scheduler.pt
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:fc26d7730b05980084a8b0329c82ecb03ed93545350d29591fddcdbd41583947
 size 627
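The four files above are stored through Git LFS, so the diff shows only the pointer text (spec version, sha256 oid, byte size) rather than the binary payloads. A small sketch for checking that a downloaded file matches its pointer, using the oid and size from the optimizer.pt entry above:

# Sketch: verify a downloaded LFS object against its pointer's oid and size.
import hashlib
import os

path = "optimizer.pt"
expected_oid = "977f5dd02837def8a67bfdb3840d3749412b546ae910342c39b43901a84dab88"
expected_size = 29910213

h = hashlib.sha256()
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):  # read in 1 MiB chunks
        h.update(chunk)

assert os.path.getsize(path) == expected_size, "size mismatch"
assert h.hexdigest() == expected_oid, "sha256 mismatch"
print("optimizer.pt matches its LFS pointer")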
trainer_state.json
CHANGED
@@ -1,8 +1,8 @@
 {
   "best_metric": 0.9291033148765564,
   "best_model_checkpoint": "segformer-finetuned-segments-cmp-facade-outputs-transforms/checkpoint-360",
-  "epoch":
-  "global_step":
+  "epoch": 49.375,
+  "global_step": 1580,
   "is_hyper_param_search": false,
   "is_local_process_zero": true,
   "is_world_process_zero": true,
@@ -12567,170 +12567,11 @@
       "eval_samples_per_second": 0.908,
       "eval_steps_per_second": 0.08,
       "step": 1580
-    },
-    {
-      "epoch": 49.41,
-      "learning_rate": 7.1249999999999995e-06,
-      "loss": 0.2603,
-      "step": 1581
-    },
-    {
-      "epoch": 49.44,
-      "learning_rate": 6.749999999999999e-06,
-      "loss": 0.2196,
-      "step": 1582
-    },
-    {
-      "epoch": 49.47,
-      "learning_rate": 6.375e-06,
-      "loss": 0.2607,
-      "step": 1583
-    },
-    {
-      "epoch": 49.5,
-      "learning_rate": 5.999999999999999e-06,
-      "loss": 0.2358,
-      "step": 1584
-    },
-    {
-      "epoch": 49.53,
-      "learning_rate": 5.6249999999999995e-06,
-      "loss": 0.2127,
-      "step": 1585
-    },
-    {
-      "epoch": 49.56,
-      "learning_rate": 5.25e-06,
-      "loss": 0.2152,
-      "step": 1586
-    },
-    {
-      "epoch": 49.59,
-      "learning_rate": 4.875e-06,
-      "loss": 0.2255,
-      "step": 1587
-    },
-    {
-      "epoch": 49.62,
-      "learning_rate": 4.499999999999999e-06,
-      "loss": 0.2264,
-      "step": 1588
-    },
-    {
-      "epoch": 49.66,
-      "learning_rate": 4.1249999999999995e-06,
-      "loss": 0.214,
-      "step": 1589
-    },
-    {
-      "epoch": 49.69,
-      "learning_rate": 3.7499999999999997e-06,
-      "loss": 0.2275,
-      "step": 1590
-    },
-    {
-      "epoch": 49.72,
-      "learning_rate": 3.3749999999999995e-06,
-      "loss": 0.2205,
-      "step": 1591
-    },
-    {
-      "epoch": 49.75,
-      "learning_rate": 2.9999999999999997e-06,
-      "loss": 0.2527,
-      "step": 1592
-    },
-    {
-      "epoch": 49.78,
-      "learning_rate": 2.625e-06,
-      "loss": 0.2436,
-      "step": 1593
-    },
-    {
-      "epoch": 49.81,
-      "learning_rate": 2.2499999999999996e-06,
-      "loss": 0.2245,
-      "step": 1594
-    },
-    {
-      "epoch": 49.84,
-      "learning_rate": 1.8749999999999998e-06,
-      "loss": 0.2298,
-      "step": 1595
-    },
-    {
-      "epoch": 49.88,
-      "learning_rate": 1.4999999999999998e-06,
-      "loss": 0.2435,
-      "step": 1596
-    },
-    {
-      "epoch": 49.91,
-      "learning_rate": 1.1249999999999998e-06,
-      "loss": 0.2179,
-      "step": 1597
-    },
-    {
-      "epoch": 49.94,
-      "learning_rate": 7.499999999999999e-07,
-      "loss": 0.2251,
-      "step": 1598
-    },
-    {
-      "epoch": 49.97,
-      "learning_rate": 3.7499999999999996e-07,
-      "loss": 0.2433,
-      "step": 1599
-    },
-    {
-      "epoch": 50.0,
-      "learning_rate": 0.0,
-      "loss": 0.2299,
-      "step": 1600
-    },
-    {
-      "epoch": 50.0,
-      "eval_loss": 1.1650828123092651,
-      "eval_mean_accuracy": 0.5417148369282497,
-      "eval_mean_iou": 0.4159080609633914,
-      "eval_overall_accuracy": 0.7073247474536561,
-      "eval_per_category_accuracy": [
-        0.7052524493403106,
-        0.8016972929429861,
-        0.7569536807460481,
-        0.36770737191275565,
-        0.5672630250381376,
-        0.5649218440946654,
-        0.6160593813420158,
-        0.5216406964023782,
-        0.363590587650728,
-        0.5980214471870451,
-        0.20086436719689757,
-        0.43660589928502896
-      ],
-      "eval_per_category_iou": [
-        0.6059737667522098,
-        0.5922624046091037,
-        0.6036323553739977,
-        0.28742998955776455,
-        0.3995807331319292,
-        0.4092660539603954,
-        0.4687620360847436,
-        0.4498044099685319,
-        0.23927425500048172,
-        0.43375479764946384,
-        0.16512549537648613,
-        0.3360304340955885
-      ],
-      "eval_runtime": 125.2298,
-      "eval_samples_per_second": 0.91,
-      "eval_steps_per_second": 0.08,
-      "step": 1600
     }
   ],
   "max_steps": 1600,
   "num_train_epochs": 50,
-  "total_flos": 3.
+  "total_flos": 3.274258121175859e+17,
   "trial_name": null,
   "trial_params": null
 }
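trainer_state.json is plain JSON written by the transformers Trainer, so the updated values (epoch 49.375, global_step 1580, and a log history that now ends at step 1580) can be checked directly. A minimal sketch, assuming the standard Trainer layout in which the logged entries sit under the log_history key:

# Sketch: inspect the trainer state after this commit (file assumed local).
import json

with open("trainer_state.json") as f:
    state = json.load(f)

print(state["epoch"], state["global_step"])    # expected: 49.375 1580
print(state["best_metric"], state["best_model_checkpoint"])
print(state["log_history"][-1]["step"])        # expected: 1580

Resuming training from the checkpoint directory that contains these files (for example via trainer.train(resume_from_checkpoint=...)) would continue from step 1580 rather than from the removed steps 1581-1600.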