poca-SoccerTwos/run_logs/timers.json
{
"name": "root",
"gauges": {
"SoccerTwos.Policy.Entropy.mean": {
"value": 2.4397377967834473,
"min": 2.3143255710601807,
"max": 3.2371737957000732,
"count": 1000
},
"SoccerTwos.Policy.Entropy.sum": {
"value": 48794.7578125,
"min": 5251.326171875,
"max": 117436.7109375,
"count": 1000
},
"SoccerTwos.Environment.EpisodeLength.mean": {
"value": 73.43283582089552,
"min": 46.91919191919192,
"max": 999.0,
"count": 1000
},
"SoccerTwos.Environment.EpisodeLength.sum": {
"value": 19680.0,
"min": 7992.0,
"max": 31780.0,
"count": 1000
},
"SoccerTwos.Self-play.ELO.mean": {
"value": 1531.167342426032,
"min": 1207.752528171942,
"max": 1574.1534336682091,
"count": 962
},
"SoccerTwos.Self-play.ELO.sum": {
"value": 205176.4238850883,
"min": 2415.505056343884,
"max": 307319.6599696821,
"count": 962
},
"SoccerTwos.Step.mean": {
"value": 10499974.0,
"min": 499734.0,
"max": 10499974.0,
"count": 1001
},
"SoccerTwos.Step.sum": {
"value": 10499974.0,
"min": 499734.0,
"max": 10499974.0,
"count": 1001
},
"SoccerTwos.Policy.ExtrinsicBaselineEstimate.mean": {
"value": -0.05285456031560898,
"min": -0.15564526617527008,
"max": 0.13551953434944153,
"count": 1001
},
"SoccerTwos.Policy.ExtrinsicBaselineEstimate.sum": {
"value": -7.1353654861450195,
"min": -24.747596740722656,
"max": 20.919309616088867,
"count": 1001
},
"SoccerTwos.Policy.ExtrinsicValueEstimate.mean": {
"value": -0.05140404403209686,
"min": -0.15863391757011414,
"max": 0.14230753481388092,
"count": 1001
},
"SoccerTwos.Policy.ExtrinsicValueEstimate.sum": {
"value": -6.93954610824585,
"min": -25.222793579101562,
"max": 22.070335388183594,
"count": 1001
},
"SoccerTwos.Environment.CumulativeReward.mean": {
"value": 0.0,
"min": 0.0,
"max": 0.0,
"count": 1001
},
"SoccerTwos.Environment.CumulativeReward.sum": {
"value": 0.0,
"min": 0.0,
"max": 0.0,
"count": 1001
},
"SoccerTwos.Policy.ExtrinsicReward.mean": {
"value": -0.12560740576850044,
"min": -0.540971428155899,
"max": 0.401263155435261,
"count": 1001
},
"SoccerTwos.Policy.ExtrinsicReward.sum": {
"value": -16.95699977874756,
"min": -50.82279992103577,
"max": 65.16880011558533,
"count": 1001
},
"SoccerTwos.Environment.GroupCumulativeReward.mean": {
"value": -0.12560740576850044,
"min": -0.540971428155899,
"max": 0.401263155435261,
"count": 1001
},
"SoccerTwos.Environment.GroupCumulativeReward.sum": {
"value": -16.95699977874756,
"min": -50.82279992103577,
"max": 65.16880011558533,
"count": 1001
},
"SoccerTwos.IsTraining.mean": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 1001
},
"SoccerTwos.IsTraining.sum": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 1001
},
"SoccerTwos.Losses.PolicyLoss.mean": {
"value": 0.016887286700269517,
"min": 0.011112152040974857,
"max": 0.02375900255938177,
"count": 482
},
"SoccerTwos.Losses.PolicyLoss.sum": {
"value": 0.016887286700269517,
"min": 0.011112152040974857,
"max": 0.02375900255938177,
"count": 482
},
"SoccerTwos.Losses.ValueLoss.mean": {
"value": 0.09483099778493245,
"min": 1.0459893114026878e-06,
"max": 0.12906097347537676,
"count": 482
},
"SoccerTwos.Losses.ValueLoss.sum": {
"value": 0.09483099778493245,
"min": 1.0459893114026878e-06,
"max": 0.12906097347537676,
"count": 482
},
"SoccerTwos.Losses.BaselineLoss.mean": {
"value": 0.09558213179310163,
"min": 1.0327452438711285e-06,
"max": 0.13083152001102766,
"count": 482
},
"SoccerTwos.Losses.BaselineLoss.sum": {
"value": 0.09558213179310163,
"min": 1.0327452438711285e-06,
"max": 0.13083152001102766,
"count": 482
},
"SoccerTwos.Policy.LearningRate.mean": {
"value": 0.0003,
"min": 0.0003,
"max": 0.0003,
"count": 482
},
"SoccerTwos.Policy.LearningRate.sum": {
"value": 0.0003,
"min": 0.0003,
"max": 0.0003,
"count": 482
},
"SoccerTwos.Policy.Epsilon.mean": {
"value": 0.30000000000000004,
"min": 0.30000000000000004,
"max": 0.30000000000000004,
"count": 482
},
"SoccerTwos.Policy.Epsilon.sum": {
"value": 0.30000000000000004,
"min": 0.30000000000000004,
"max": 0.30000000000000004,
"count": 482
},
"SoccerTwos.Policy.Beta.mean": {
"value": 0.010000000000000002,
"min": 0.010000000000000002,
"max": 0.010000000000000002,
"count": 482
},
"SoccerTwos.Policy.Beta.sum": {
"value": 0.010000000000000002,
"min": 0.010000000000000002,
"max": 0.010000000000000002,
"count": 482
}
},
"metadata": {
"timer_format_version": "0.1.0",
"start_time_seconds": "1683839191",
"python_version": "3.9.16 (main, Mar 8 2023, 10:39:24) [MSC v.1916 64 bit (AMD64)]",
"command_line_arguments": "C:\\Users\\alica\\anaconda3\\envs\\soccer_rl\\Scripts\\mlagents-learn ./config/poca/SoccerTwos.yaml --env=./training-envs-executables/SoccerTwos.exe --run-id=SoccerTwos --no-graphics --resume",
"mlagents_version": "0.31.0.dev0",
"mlagents_envs_version": "0.31.0.dev0",
"communication_protocol_version": "1.5.0",
"pytorch_version": "2.0.1+cpu",
"numpy_version": "1.21.2",
"end_time_seconds": "1683897272"
},
"total": 58080.5491508,
"count": 1,
"self": 3.364133299997775,
"children": {
"run_training.setup": {
"total": 0.15131649999999963,
"count": 1,
"self": 0.15131649999999963
},
"TrainerController.start_learning": {
"total": 58077.033701,
"count": 1,
"self": 28.36459649718745,
"children": {
"TrainerController._reset_env": {
"total": 7.703580700025107,
"count": 68,
"self": 7.703580700025107
},
"TrainerController.advance": {
"total": 58040.60999380278,
"count": 681485,
"self": 30.837599103950197,
"children": {
"env_step": {
"total": 21412.92841939736,
"count": 681485,
"self": 16694.588936696888,
"children": {
"SubprocessEnvManager._take_step": {
"total": 4701.072346798175,
"count": 681485,
"self": 185.5707896962267,
"children": {
"TorchPolicy.evaluate": {
"total": 4515.5015571019485,
"count": 1264528,
"self": 4515.5015571019485
}
}
},
"workers": {
"total": 17.26713590229859,
"count": 681484,
"self": 0.0,
"children": {
"worker_root": {
"total": 58032.03187709827,
"count": 681484,
"is_parallel": true,
"self": 44532.123092398644,
"children": {
"steps_from_proto": {
"total": 0.25089240001161706,
"count": 136,
"is_parallel": true,
"self": 0.04736130005872319,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.20353109995289387,
"count": 544,
"is_parallel": true,
"self": 0.20353109995289387
}
}
},
"UnityEnvironment.step": {
"total": 13499.65789229961,
"count": 681484,
"is_parallel": true,
"self": 801.3268785012751,
"children": {
"UnityEnvironment._generate_step_input": {
"total": 575.217196400056,
"count": 681484,
"is_parallel": true,
"self": 575.217196400056
},
"communicator.exchange": {
"total": 9556.806738498799,
"count": 681484,
"is_parallel": true,
"self": 9556.806738498799
},
"steps_from_proto": {
"total": 2566.307078899482,
"count": 1362968,
"is_parallel": true,
"self": 487.2911451955056,
"children": {
"_process_rank_one_or_two_observation": {
"total": 2079.0159337039763,
"count": 5451872,
"is_parallel": true,
"self": 2079.0159337039763
}
}
}
}
}
}
}
}
}
}
},
"trainer_advance": {
"total": 36596.84397530147,
"count": 681484,
"self": 187.30504210065556,
"children": {
"process_trajectory": {
"total": 4954.835507400845,
"count": 681484,
"self": 4949.422675500855,
"children": {
"RLTrainer._checkpoint": {
"total": 5.412831899990039,
"count": 21,
"self": 5.412831899990039
}
}
},
"_update_policy": {
"total": 31454.70342579997,
"count": 482,
"self": 2522.0402537999544,
"children": {
"TorchPOCAOptimizer.update": {
"total": 28932.663172000015,
"count": 14472,
"self": 28932.663172000015
}
}
}
}
}
}
},
"trainer_threads": {
"total": 5.30000397702679e-06,
"count": 1,
"self": 5.30000397702679e-06
},
"TrainerController._save_models": {
"total": 0.35552470000402536,
"count": 1,
"self": 0.02085079999960726,
"children": {
"RLTrainer._checkpoint": {
"total": 0.3346739000044181,
"count": 1,
"self": 0.3346739000044181
}
}
}
}
}
}
}