poca-SoccerTwos / run_logs / timers.json
{
"name": "root",
"gauges": {
"SoccerTwos.Policy.Entropy.mean": {
"value": 1.614660382270813,
"min": 1.5329911708831787,
"max": 3.1653454303741455,
"count": 946
},
"SoccerTwos.Policy.Entropy.sum": {
"value": 32654.890625,
"min": 6200.6533203125,
"max": 116047.109375,
"count": 946
},
"SoccerTwos.Environment.EpisodeLength.mean": {
"value": 49.21212121212121,
"min": 41.33050847457627,
"max": 999.0,
"count": 946
},
"SoccerTwos.Environment.EpisodeLength.sum": {
"value": 19488.0,
"min": 3996.0,
"max": 31144.0,
"count": 946
},
"SoccerTwos.Self-play.ELO.mean": {
"value": 1590.3808205730516,
"min": 1210.6657767586505,
"max": 1619.790326024402,
"count": 941
},
"SoccerTwos.Self-play.ELO.sum": {
"value": 314895.4024734642,
"min": 2424.0827653708648,
"max": 377513.0365536685,
"count": 941
},
"SoccerTwos.Step.mean": {
"value": 9999992.0,
"min": 549657.0,
"max": 9999992.0,
"count": 946
},
"SoccerTwos.Step.sum": {
"value": 9999992.0,
"min": 549657.0,
"max": 9999992.0,
"count": 946
},
"SoccerTwos.Policy.ExtrinsicBaselineEstimate.mean": {
"value": -0.07650735974311829,
"min": -0.11666654795408249,
"max": 0.21403224766254425,
"count": 946
},
"SoccerTwos.Policy.ExtrinsicBaselineEstimate.sum": {
"value": -15.22496509552002,
"min": -21.466644287109375,
"max": 29.588966369628906,
"count": 946
},
"SoccerTwos.Policy.ExtrinsicValueEstimate.mean": {
"value": -0.07262206077575684,
"min": -0.12031123787164688,
"max": 0.2111627459526062,
"count": 946
},
"SoccerTwos.Policy.ExtrinsicValueEstimate.sum": {
"value": -14.451789855957031,
"min": -22.13726806640625,
"max": 29.504426956176758,
"count": 946
},
"SoccerTwos.Environment.CumulativeReward.mean": {
"value": 0.0,
"min": 0.0,
"max": 0.0,
"count": 946
},
"SoccerTwos.Environment.CumulativeReward.sum": {
"value": 0.0,
"min": 0.0,
"max": 0.0,
"count": 946
},
"SoccerTwos.Policy.ExtrinsicReward.mean": {
"value": -0.16366834197212105,
"min": -0.6058875005692244,
"max": 0.4048776696029219,
"count": 946
},
"SoccerTwos.Policy.ExtrinsicReward.sum": {
"value": -32.57000005245209,
"min": -66.00479984283447,
"max": 55.14299935102463,
"count": 946
},
"SoccerTwos.Environment.GroupCumulativeReward.mean": {
"value": -0.16366834197212105,
"min": -0.6058875005692244,
"max": 0.4048776696029219,
"count": 946
},
"SoccerTwos.Environment.GroupCumulativeReward.sum": {
"value": -32.57000005245209,
"min": -66.00479984283447,
"max": 55.14299935102463,
"count": 946
},
"SoccerTwos.IsTraining.mean": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 946
},
"SoccerTwos.IsTraining.sum": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 946
},
"SoccerTwos.Losses.PolicyLoss.mean": {
"value": 0.015870315426339706,
"min": 0.00968948356846037,
"max": 0.0240408629955103,
"count": 456
},
"SoccerTwos.Losses.PolicyLoss.sum": {
"value": 0.015870315426339706,
"min": 0.00968948356846037,
"max": 0.0240408629955103,
"count": 456
},
"SoccerTwos.Losses.ValueLoss.mean": {
"value": 0.10202294091383617,
"min": 0.00020218975963265013,
"max": 0.12481951639056206,
"count": 456
},
"SoccerTwos.Losses.ValueLoss.sum": {
"value": 0.10202294091383617,
"min": 0.00020218975963265013,
"max": 0.12481951639056206,
"count": 456
},
"SoccerTwos.Losses.BaselineLoss.mean": {
"value": 0.10368427063028017,
"min": 0.00021083913209925717,
"max": 0.12807482952872912,
"count": 456
},
"SoccerTwos.Losses.BaselineLoss.sum": {
"value": 0.10368427063028017,
"min": 0.00021083913209925717,
"max": 0.12807482952872912,
"count": 456
},
"SoccerTwos.Policy.LearningRate.mean": {
"value": 0.0003,
"min": 0.0003,
"max": 0.0003,
"count": 456
},
"SoccerTwos.Policy.LearningRate.sum": {
"value": 0.0003,
"min": 0.0003,
"max": 0.0003,
"count": 456
},
"SoccerTwos.Policy.Epsilon.mean": {
"value": 0.20000000000000007,
"min": 0.20000000000000004,
"max": 0.20000000000000007,
"count": 456
},
"SoccerTwos.Policy.Epsilon.sum": {
"value": 0.20000000000000007,
"min": 0.20000000000000004,
"max": 0.20000000000000007,
"count": 456
},
"SoccerTwos.Policy.Beta.mean": {
"value": 0.005000000000000001,
"min": 0.005000000000000001,
"max": 0.005000000000000001,
"count": 456
},
"SoccerTwos.Policy.Beta.sum": {
"value": 0.005000000000000001,
"min": 0.005000000000000001,
"max": 0.005000000000000001,
"count": 456
}
},
"metadata": {
"timer_format_version": "0.1.0",
"start_time_seconds": "1735412660",
"python_version": "3.10.12 (main, Jul 5 2023, 18:54:27) [GCC 11.2.0]",
"command_line_arguments": "/home/kane/miniconda3/envs/soccer_rl/bin/mlagents-learn config/poca/SoccerTwos.yaml --env=training-envs-executables/SoccerTwos.x86_64 --run-id=SoccerTwos --no-graphics --resume",
"mlagents_version": "1.2.0.dev0",
"mlagents_envs_version": "1.2.0.dev0",
"communication_protocol_version": "1.5.0",
"pytorch_version": "2.5.1+cu124",
"numpy_version": "1.23.5",
"end_time_seconds": "1735419135"
},
"total": 6475.497817641,
"count": 1,
"self": 0.16737230699072825,
"children": {
"run_training.setup": {
"total": 0.010108270005730446,
"count": 1,
"self": 0.010108270005730446
},
"TrainerController.start_learning": {
"total": 6475.320337064004,
"count": 1,
"self": 5.83195191447885,
"children": {
"TrainerController._reset_env": {
"total": 1.3394832180274534,
"count": 49,
"self": 1.3394832180274534
},
"TrainerController.advance": {
"total": 6468.083171648497,
"count": 651443,
"self": 5.450060901894176,
"children": {
"env_step": {
"total": 4922.454020500554,
"count": 651443,
"self": 3595.986672016901,
"children": {
"SubprocessEnvManager._take_step": {
"total": 1322.9116125044602,
"count": 651443,
"self": 39.47856949702691,
"children": {
"TorchPolicy.evaluate": {
"total": 1283.4330430074333,
"count": 1190364,
"self": 1283.4330430074333
}
}
},
"workers": {
"total": 3.555735979192832,
"count": 651443,
"self": 0.0,
"children": {
"worker_root": {
"total": 6467.586811553083,
"count": 651443,
"is_parallel": true,
"self": 3558.7149287097272,
"children": {
"run_training.setup": {
"total": 0.0,
"count": 0,
"is_parallel": true,
"self": 0.0,
"children": {
"steps_from_proto": {
"total": 0.0010675999947125092,
"count": 2,
"is_parallel": true,
"self": 0.0002696989831747487,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.0007979010115377605,
"count": 8,
"is_parallel": true,
"self": 0.0007979010115377605
}
}
},
"UnityEnvironment.step": {
"total": 0.011551907002285589,
"count": 1,
"is_parallel": true,
"self": 0.00024401399423368275,
"children": {
"UnityEnvironment._generate_step_input": {
"total": 0.00021315100457286462,
"count": 1,
"is_parallel": true,
"self": 0.00021315100457286462
},
"communicator.exchange": {
"total": 0.010420106002129614,
"count": 1,
"is_parallel": true,
"self": 0.010420106002129614
},
"steps_from_proto": {
"total": 0.0006746360013494268,
"count": 2,
"is_parallel": true,
"self": 0.0001330439918092452,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.0005415920095401816,
"count": 8,
"is_parallel": true,
"self": 0.0005415920095401816
}
}
}
}
}
}
},
"steps_from_proto": {
"total": 0.03312732799531659,
"count": 96,
"is_parallel": true,
"self": 0.006399894969945308,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.026727433025371283,
"count": 384,
"is_parallel": true,
"self": 0.026727433025371283
}
}
},
"UnityEnvironment.step": {
"total": 2908.8387555153604,
"count": 651442,
"is_parallel": true,
"self": 154.0329934919573,
"children": {
"UnityEnvironment._generate_step_input": {
"total": 88.83800150844036,
"count": 651442,
"is_parallel": true,
"self": 88.83800150844036
},
"communicator.exchange": {
"total": 2244.097924694048,
"count": 651442,
"is_parallel": true,
"self": 2244.097924694048
},
"steps_from_proto": {
"total": 421.8698358209149,
"count": 1302884,
"is_parallel": true,
"self": 78.01466316633741,
"children": {
"_process_rank_one_or_two_observation": {
"total": 343.8551726545775,
"count": 5211536,
"is_parallel": true,
"self": 343.8551726545775
}
}
}
}
}
}
}
}
}
}
},
"trainer_advance": {
"total": 1540.1790902460489,
"count": 651443,
"self": 50.01807652482239,
"children": {
"process_trajectory": {
"total": 746.8638942911348,
"count": 651443,
"self": 745.5166783011227,
"children": {
"RLTrainer._checkpoint": {
"total": 1.347215990012046,
"count": 19,
"self": 1.347215990012046
}
}
},
"_update_policy": {
"total": 743.2971194300917,
"count": 456,
"self": 464.1584625707692,
"children": {
"TorchPOCAOptimizer.update": {
"total": 279.1386568593225,
"count": 13695,
"self": 279.1386568593225
}
}
}
}
}
}
},
"trainer_threads": {
"total": 4.0500162867829204e-07,
"count": 1,
"self": 4.0500162867829204e-07
},
"TrainerController._save_models": {
"total": 0.06572987799881957,
"count": 1,
"self": 0.0008875089988578111,
"children": {
"RLTrainer._checkpoint": {
"total": 0.06484236899996176,
"count": 1,
"self": 0.06484236899996176
}
}
}
}
}
}
}