pocaSoccerTwos/run_logs/timers.json
{
"name": "root",
"gauges": {
"SoccerTwos.Policy.Entropy.mean": {
"value": 1.335242509841919,
"min": 1.2497478723526,
"max": 1.65972900390625,
"count": 3016
},
"SoccerTwos.Policy.Entropy.sum": {
"value": 25807.568359375,
"min": 10785.5888671875,
"max": 37479.4296875,
"count": 3016
},
"SoccerTwos.Environment.EpisodeLength.mean": {
"value": 60.28915662650602,
"min": 41.96491228070175,
"max": 127.0,
"count": 3016
},
"SoccerTwos.Environment.EpisodeLength.sum": {
"value": 20016.0,
"min": 4132.0,
"max": 21648.0,
"count": 3016
},
"SoccerTwos.Self-play.ELO.mean": {
"value": 1684.106472585243,
"min": 1534.9586645687386,
"max": 1720.918212215921,
"count": 3016
},
"SoccerTwos.Self-play.ELO.sum": {
"value": 279561.67444915033,
"min": 78232.41097047081,
"max": 391619.8340776406,
"count": 3016
},
"SoccerTwos.Step.mean": {
"value": 49999916.0,
"min": 19849972.0,
"max": 49999916.0,
"count": 3016
},
"SoccerTwos.Step.sum": {
"value": 49999916.0,
"min": 19849972.0,
"max": 49999916.0,
"count": 3016
},
"SoccerTwos.Policy.ExtrinsicBaselineEstimate.mean": {
"value": -0.00577916344627738,
"min": -0.1655862182378769,
"max": 0.11256951093673706,
"count": 3016
},
"SoccerTwos.Policy.ExtrinsicBaselineEstimate.sum": {
"value": -0.9593411684036255,
"min": -36.26338195800781,
"max": 19.361955642700195,
"count": 3016
},
"SoccerTwos.Policy.ExtrinsicValueEstimate.mean": {
"value": -0.005254586227238178,
"min": -0.16391198337078094,
"max": 0.11621252447366714,
"count": 3016
},
"SoccerTwos.Policy.ExtrinsicValueEstimate.sum": {
"value": -0.8722612857818604,
"min": -35.896724700927734,
"max": 19.988554000854492,
"count": 3016
},
"SoccerTwos.Environment.CumulativeReward.mean": {
"value": 0.0,
"min": 0.0,
"max": 0.0,
"count": 3016
},
"SoccerTwos.Environment.CumulativeReward.sum": {
"value": 0.0,
"min": 0.0,
"max": 0.0,
"count": 3016
},
"SoccerTwos.Policy.ExtrinsicReward.mean": {
"value": -0.13476626341601453,
"min": -0.4538626866554146,
"max": 0.359811628280684,
"count": 3016
},
"SoccerTwos.Policy.ExtrinsicReward.sum": {
"value": -22.37119972705841,
"min": -72.66999995708466,
"max": 62.75700056552887,
"count": 3016
},
"SoccerTwos.Environment.GroupCumulativeReward.mean": {
"value": -0.13476626341601453,
"min": -0.4538626866554146,
"max": 0.359811628280684,
"count": 3016
},
"SoccerTwos.Environment.GroupCumulativeReward.sum": {
"value": -22.37119972705841,
"min": -72.66999995708466,
"max": 62.75700056552887,
"count": 3016
},
"SoccerTwos.IsTraining.mean": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 3016
},
"SoccerTwos.IsTraining.sum": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 3016
},
"SoccerTwos.Losses.PolicyLoss.mean": {
"value": 0.01790835737483576,
"min": 0.010325214187226569,
"max": 0.02559798234530414,
"count": 1463
},
"SoccerTwos.Losses.PolicyLoss.sum": {
"value": 0.01790835737483576,
"min": 0.010325214187226569,
"max": 0.02559798234530414,
"count": 1463
},
"SoccerTwos.Losses.ValueLoss.mean": {
"value": 0.10707559213042259,
"min": 0.07452068577210108,
"max": 0.13027308781941732,
"count": 1463
},
"SoccerTwos.Losses.ValueLoss.sum": {
"value": 0.10707559213042259,
"min": 0.07452068577210108,
"max": 0.13027308781941732,
"count": 1463
},
"SoccerTwos.Losses.BaselineLoss.mean": {
"value": 0.10860733091831207,
"min": 0.07558782448371251,
"max": 0.13306420942147573,
"count": 1463
},
"SoccerTwos.Losses.BaselineLoss.sum": {
"value": 0.10860733091831207,
"min": 0.07558782448371251,
"max": 0.13306420942147573,
"count": 1463
},
"SoccerTwos.Policy.LearningRate.mean": {
"value": 0.0003,
"min": 0.0003,
"max": 0.0003,
"count": 1463
},
"SoccerTwos.Policy.LearningRate.sum": {
"value": 0.0003,
"min": 0.0003,
"max": 0.0003,
"count": 1463
},
"SoccerTwos.Policy.Epsilon.mean": {
"value": 0.20000000000000007,
"min": 0.20000000000000007,
"max": 0.20000000000000007,
"count": 1463
},
"SoccerTwos.Policy.Epsilon.sum": {
"value": 0.20000000000000007,
"min": 0.20000000000000007,
"max": 0.20000000000000007,
"count": 1463
},
"SoccerTwos.Policy.Beta.mean": {
"value": 0.005000000000000001,
"min": 0.005000000000000001,
"max": 0.005000000000000001,
"count": 1463
},
"SoccerTwos.Policy.Beta.sum": {
"value": 0.005000000000000001,
"min": 0.005000000000000001,
"max": 0.005000000000000001,
"count": 1463
}
},
"metadata": {
"timer_format_version": "0.1.0",
"start_time_seconds": "1677488444",
"python_version": "3.9.16 (main, Jan 11 2023, 16:16:36) [MSC v.1916 64 bit (AMD64)]",
"command_line_arguments": "D:\\ProgramData\\anaconda3\\envs\\rl\\Scripts\\mlagents-learn ./config/poca/SoccerTwos.yaml --env=./training-envs-executables/SoccerTwos.exe --run-id=SoccerTwos --no-graphics --resume",
"mlagents_version": "0.31.0.dev0",
"mlagents_envs_version": "0.31.0.dev0",
"communication_protocol_version": "1.5.0",
"pytorch_version": "1.13.1+cu117",
"numpy_version": "1.21.2",
"end_time_seconds": "1677557751"
},
"total": 69302.9482449,
"count": 1,
"self": 0.3338330000115093,
"children": {
"run_training.setup": {
"total": 0.09748789999999996,
"count": 1,
"self": 0.09748789999999996
},
"TrainerController.start_learning": {
"total": 69302.516924,
"count": 1,
"self": 54.97918619780103,
"children": {
"TrainerController._reset_env": {
"total": 10.728283500000272,
"count": 152,
"self": 10.728283500000272
},
"TrainerController.advance": {
"total": 69236.48009080222,
"count": 2084404,
"self": 60.12522281125712,
"children": {
"env_step": {
"total": 50163.952168593896,
"count": 2084404,
"self": 30355.319812292288,
"children": {
"SubprocessEnvManager._take_step": {
"total": 19777.416134604282,
"count": 2084404,
"self": 361.2965211059782,
"children": {
"TorchPolicy.evaluate": {
"total": 19416.119613498304,
"count": 3783858,
"self": 19416.119613498304
}
}
},
"workers": {
"total": 31.216221697326382,
"count": 2084404,
"self": 0.0,
"children": {
"worker_root": {
"total": 69201.5203579982,
"count": 2084404,
"is_parallel": true,
"self": 44271.920437604196,
"children": {
"steps_from_proto": {
"total": 0.23890029995652817,
"count": 304,
"is_parallel": true,
"self": 0.05334309982871677,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.1855572001278114,
"count": 1216,
"is_parallel": true,
"self": 0.1855572001278114
}
}
},
"UnityEnvironment.step": {
"total": 24929.361020094053,
"count": 2084404,
"is_parallel": true,
"self": 954.357082805036,
"children": {
"UnityEnvironment._generate_step_input": {
"total": 835.8253819955305,
"count": 2084404,
"is_parallel": true,
"self": 835.8253819955305
},
"communicator.exchange": {
"total": 20064.6166544992,
"count": 2084404,
"is_parallel": true,
"self": 20064.6166544992
},
"steps_from_proto": {
"total": 3074.561900794286,
"count": 4168808,
"is_parallel": true,
"self": 671.429250614442,
"children": {
"_process_rank_one_or_two_observation": {
"total": 2403.132650179844,
"count": 16675232,
"is_parallel": true,
"self": 2403.132650179844
}
}
}
}
}
}
}
}
}
}
},
"trainer_advance": {
"total": 19012.402699397066,
"count": 2084404,
"self": 393.61732959251094,
"children": {
"process_trajectory": {
"total": 9976.830412104677,
"count": 2084404,
"self": 9956.102462904686,
"children": {
"RLTrainer._checkpoint": {
"total": 20.727949199990974,
"count": 61,
"self": 20.727949199990974
}
}
},
"_update_policy": {
"total": 8641.954957699878,
"count": 1463,
"self": 4842.044250799509,
"children": {
"TorchPOCAOptimizer.update": {
"total": 3799.9107069003685,
"count": 43890,
"self": 3799.9107069003685
}
}
}
}
}
}
},
"trainer_threads": {
"total": 1.3999961083754897e-06,
"count": 1,
"self": 1.3999961083754897e-06
},
"TrainerController._save_models": {
"total": 0.3293620999902487,
"count": 1,
"self": 0.004451099986908957,
"children": {
"RLTrainer._checkpoint": {
"total": 0.3249110000033397,
"count": 1,
"self": 0.3249110000033397
}
}
}
}
}
}
}