{
"name": "root",
"gauges": {
"SoccerTwos.Policy.Entropy.mean": {
"value": 1.5368013381958008,
"min": 1.4798517227172852,
"max": 1.717839002609253,
"count": 1387
},
"SoccerTwos.Policy.Entropy.sum": {
"value": 30391.783203125,
"min": 26218.884765625,
"max": 39726.2421875,
"count": 1387
},
"SoccerTwos.Environment.EpisodeLength.mean": {
"value": 77.85714285714286,
"min": 37.12977099236641,
"max": 93.54716981132076,
"count": 1387
},
"SoccerTwos.Environment.EpisodeLength.sum": {
"value": 19620.0,
"min": 18048.0,
"max": 21044.0,
"count": 1387
},
"SoccerTwos.Self-play.ELO.mean": {
"value": 1754.3647881834338,
"min": 1663.1415280356007,
"max": 1790.4749953370062,
"count": 1387
},
"SoccerTwos.Self-play.ELO.sum": {
"value": 221049.96331111266,
"min": 186001.52819740935,
"max": 454293.69494184444,
"count": 1387
},
"SoccerTwos.Step.mean": {
"value": 23799980.0,
"min": 9939999.0,
"max": 23799980.0,
"count": 1387
},
"SoccerTwos.Step.sum": {
"value": 23799980.0,
"min": 9939999.0,
"max": 23799980.0,
"count": 1387
},
"SoccerTwos.Policy.ExtrinsicBaselineEstimate.mean": {
"value": -0.05803244933485985,
"min": -0.15242762863636017,
"max": 0.11841320246458054,
"count": 1387
},
"SoccerTwos.Policy.ExtrinsicBaselineEstimate.sum": {
"value": -7.370121002197266,
"min": -25.455413818359375,
"max": 21.788028717041016,
"count": 1387
},
"SoccerTwos.Policy.ExtrinsicValueEstimate.mean": {
"value": -0.05722297355532646,
"min": -0.1545315533876419,
"max": 0.12288798391819,
"count": 1387
},
"SoccerTwos.Policy.ExtrinsicValueEstimate.sum": {
"value": -7.267317771911621,
"min": -25.80677032470703,
"max": 22.61138916015625,
"count": 1387
},
"SoccerTwos.Environment.CumulativeReward.mean": {
"value": 0.0,
"min": 0.0,
"max": 0.0,
"count": 1387
},
"SoccerTwos.Environment.CumulativeReward.sum": {
"value": 0.0,
"min": 0.0,
"max": 0.0,
"count": 1387
},
"SoccerTwos.Policy.ExtrinsicReward.mean": {
"value": -0.10333070839483907,
"min": -0.4309287514537573,
"max": 0.2959999988792808,
"count": 1387
},
"SoccerTwos.Policy.ExtrinsicReward.sum": {
"value": -13.122999966144562,
"min": -68.94860023260117,
"max": 52.48499971628189,
"count": 1387
},
"SoccerTwos.Environment.GroupCumulativeReward.mean": {
"value": -0.10333070839483907,
"min": -0.4309287514537573,
"max": 0.2959999988792808,
"count": 1387
},
"SoccerTwos.Environment.GroupCumulativeReward.sum": {
"value": -13.122999966144562,
"min": -68.94860023260117,
"max": 52.48499971628189,
"count": 1387
},
"SoccerTwos.IsTraining.mean": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 1387
},
"SoccerTwos.IsTraining.sum": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 1387
},
"SoccerTwos.Losses.PolicyLoss.mean": {
"value": 0.0157304969499819,
"min": 0.01113445025572825,
"max": 0.023960910368865978,
"count": 673
},
"SoccerTwos.Losses.PolicyLoss.sum": {
"value": 0.0157304969499819,
"min": 0.01113445025572825,
"max": 0.023960910368865978,
"count": 673
},
"SoccerTwos.Losses.ValueLoss.mean": {
"value": 0.09982962533831596,
"min": 0.08123996083935102,
"max": 0.13158326968550682,
"count": 673
},
"SoccerTwos.Losses.ValueLoss.sum": {
"value": 0.09982962533831596,
"min": 0.08123996083935102,
"max": 0.13158326968550682,
"count": 673
},
"SoccerTwos.Losses.BaselineLoss.mean": {
"value": 0.10118018761277199,
"min": 0.08227557465434074,
"max": 0.13501238425572712,
"count": 673
},
"SoccerTwos.Losses.BaselineLoss.sum": {
"value": 0.10118018761277199,
"min": 0.08227557465434074,
"max": 0.13501238425572712,
"count": 673
},
"SoccerTwos.Policy.LearningRate.mean": {
"value": 0.0003,
"min": 0.0003,
"max": 0.0003,
"count": 673
},
"SoccerTwos.Policy.LearningRate.sum": {
"value": 0.0003,
"min": 0.0003,
"max": 0.0003,
"count": 673
},
"SoccerTwos.Policy.Epsilon.mean": {
"value": 0.20000000000000007,
"min": 0.20000000000000007,
"max": 0.20000000000000007,
"count": 673
},
"SoccerTwos.Policy.Epsilon.sum": {
"value": 0.20000000000000007,
"min": 0.20000000000000007,
"max": 0.20000000000000007,
"count": 673
},
"SoccerTwos.Policy.Beta.mean": {
"value": 0.005000000000000001,
"min": 0.005000000000000001,
"max": 0.005000000000000001,
"count": 673
},
"SoccerTwos.Policy.Beta.sum": {
"value": 0.005000000000000001,
"min": 0.005000000000000001,
"max": 0.005000000000000001,
"count": 673
}
},
"metadata": {
"timer_format_version": "0.1.0",
"start_time_seconds": "1700170035",
"python_version": "3.10.12 | packaged by Anaconda, Inc. | (main, Jul 5 2023, 19:01:18) [MSC v.1916 64 bit (AMD64)]",
"command_line_arguments": "\\\\?\\C:\\Users\\Nicolas\\AppData\\Local\\miniconda3\\envs\\rl\\Scripts\\mlagents-learn ./config/poca/SoccerTwos.yaml --env=./training-envs-executables/SoccerTwos.exe --run-id=SoccerTwos --no-graphics --resume",
"mlagents_version": "1.1.0.dev0",
"mlagents_envs_version": "1.1.0.dev0",
"communication_protocol_version": "1.5.0",
"pytorch_version": "2.1.1+cpu",
"numpy_version": "1.23.5",
"end_time_seconds": "1700206030"
},
"total": 35995.33808009999,
"count": 1,
"self": 0.22053069998219144,
"children": {
"run_training.setup": {
"total": 0.10423989999981131,
"count": 1,
"self": 0.10423989999981131
},
"TrainerController.start_learning": {
"total": 35995.013309500006,
"count": 1,
"self": 25.657995370667777,
"children": {
"TrainerController._reset_env": {
"total": 8.357257200070308,
"count": 71,
"self": 8.357257200070308
},
"TrainerController.advance": {
"total": 35960.76777922927,
"count": 963579,
"self": 23.942669511641725,
"children": {
"env_step": {
"total": 15560.645170511954,
"count": 963579,
"self": 12223.690495495524,
"children": {
"SubprocessEnvManager._take_step": {
"total": 3321.942329308251,
"count": 963579,
"self": 122.52573752241733,
"children": {
"TorchPolicy.evaluate": {
"total": 3199.4165917858336,
"count": 1741048,
"self": 3199.4165917858336
}
}
},
"workers": {
"total": 15.012345708179055,
"count": 963579,
"self": 0.0,
"children": {
"worker_root": {
"total": 35957.90660739917,
"count": 963579,
"is_parallel": true,
"self": 26310.496169595644,
"children": {
"steps_from_proto": {
"total": 0.1260338000865886,
"count": 142,
"is_parallel": true,
"self": 0.025274900341173634,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.10075889974541496,
"count": 568,
"is_parallel": true,
"self": 0.10075889974541496
}
}
},
"UnityEnvironment.step": {
"total": 9647.28440400344,
"count": 963579,
"is_parallel": true,
"self": 481.2311691270734,
"children": {
"UnityEnvironment._generate_step_input": {
"total": 444.0392231961887,
"count": 963579,
"is_parallel": true,
"self": 444.0392231961887
},
"communicator.exchange": {
"total": 7130.5163158940995,
"count": 963579,
"is_parallel": true,
"self": 7130.5163158940995
},
"steps_from_proto": {
"total": 1591.497695786078,
"count": 1927158,
"is_parallel": true,
"self": 324.25971953383123,
"children": {
"_process_rank_one_or_two_observation": {
"total": 1267.2379762522469,
"count": 7708632,
"is_parallel": true,
"self": 1267.2379762522469
}
}
}
}
}
}
}
}
}
}
},
"trainer_advance": {
"total": 20376.17993920567,
"count": 963578,
"self": 177.7759565134911,
"children": {
"process_trajectory": {
"total": 3911.5704576928692,
"count": 963578,
"self": 3907.8274015928328,
"children": {
"RLTrainer._checkpoint": {
"total": 3.7430561000364833,
"count": 28,
"self": 3.7430561000364833
}
}
},
"_update_policy": {
"total": 16286.83352499931,
"count": 673,
"self": 2000.1610075011122,
"children": {
"TorchPOCAOptimizer.update": {
"total": 14286.672517498198,
"count": 20190,
"self": 14286.672517498198
}
}
}
}
}
}
},
"trainer_threads": {
"total": 4.3000036384910345e-06,
"count": 1,
"self": 4.3000036384910345e-06
},
"TrainerController._save_models": {
"total": 0.23027339999680407,
"count": 1,
"self": 0.012849499995354563,
"children": {
"RLTrainer._checkpoint": {
"total": 0.2174239000014495,
"count": 1,
"self": 0.2174239000014495
}
}
}
}
}
}
}