ppo-Pyramids/run_logs/timers.json
{
"name": "root",
"gauges": {
"Pyramids.Policy.Entropy.mean": {
"value": 0.360578715801239,
"min": 0.360578715801239,
"max": 1.468069076538086,
"count": 33
},
"Pyramids.Policy.Entropy.sum": {
"value": 10771.20703125,
"min": 10771.20703125,
"max": 44535.34375,
"count": 33
},
"Pyramids.Step.mean": {
"value": 989961.0,
"min": 29952.0,
"max": 989961.0,
"count": 33
},
"Pyramids.Step.sum": {
"value": 989961.0,
"min": 29952.0,
"max": 989961.0,
"count": 33
},
"Pyramids.Policy.ExtrinsicValueEstimate.mean": {
"value": 0.5598650574684143,
"min": -0.19676291942596436,
"max": 0.6541990637779236,
"count": 33
},
"Pyramids.Policy.ExtrinsicValueEstimate.sum": {
"value": 156.76222229003906,
"min": -46.6328125,
"max": 185.7925262451172,
"count": 33
},
"Pyramids.Policy.RndValueEstimate.mean": {
"value": 0.011289467103779316,
"min": -0.004536532796919346,
"max": 0.3736550509929657,
"count": 33
},
"Pyramids.Policy.RndValueEstimate.sum": {
"value": 3.161050796508789,
"min": -1.1658889055252075,
"max": 89.67721557617188,
"count": 33
},
"Pyramids.Losses.PolicyLoss.mean": {
"value": 0.07145801514464742,
"min": 0.06564152388341932,
"max": 0.07341632298713992,
"count": 33
},
"Pyramids.Losses.PolicyLoss.sum": {
"value": 1.0004122120250638,
"min": 0.510575461510138,
"max": 1.0656952074108021,
"count": 33
},
"Pyramids.Losses.ValueLoss.mean": {
"value": 0.014710568989920871,
"min": 0.000771239985513966,
"max": 0.01843986393560911,
"count": 33
},
"Pyramids.Losses.ValueLoss.sum": {
"value": 0.2059479658588922,
"min": 0.010026119811681557,
"max": 0.2581580950985275,
"count": 33
},
"Pyramids.Policy.LearningRate.mean": {
"value": 7.3508046926214245e-06,
"min": 7.3508046926214245e-06,
"max": 0.00029515063018788575,
"count": 33
},
"Pyramids.Policy.LearningRate.sum": {
"value": 0.00010291126569669994,
"min": 0.00010291126569669994,
"max": 0.0036325885891371995,
"count": 33
},
"Pyramids.Policy.Epsilon.mean": {
"value": 0.10245023571428571,
"min": 0.10245023571428571,
"max": 0.19838354285714285,
"count": 33
},
"Pyramids.Policy.Epsilon.sum": {
"value": 1.4343033,
"min": 1.3886848,
"max": 2.6175442999999996,
"count": 33
},
"Pyramids.Policy.Beta.mean": {
"value": 0.0002547785478571428,
"min": 0.0002547785478571428,
"max": 0.00983851593142857,
"count": 33
},
"Pyramids.Policy.Beta.sum": {
"value": 0.003566899669999999,
"min": 0.003566899669999999,
"max": 0.12110519372000002,
"count": 33
},
"Pyramids.Losses.RNDLoss.mean": {
"value": 0.014601359143853188,
"min": 0.014601359143853188,
"max": 0.4007905125617981,
"count": 33
},
"Pyramids.Losses.RNDLoss.sum": {
"value": 0.20441903173923492,
"min": 0.20441903173923492,
"max": 2.8055336475372314,
"count": 33
},
"Pyramids.Environment.EpisodeLength.mean": {
"value": 327.92391304347825,
"min": 301.1734693877551,
"max": 999.0,
"count": 33
},
"Pyramids.Environment.EpisodeLength.sum": {
"value": 30169.0,
"min": 15984.0,
"max": 32596.0,
"count": 33
},
"Pyramids.Environment.CumulativeReward.mean": {
"value": 1.6068347701397927,
"min": -1.0000000521540642,
"max": 1.6784081514544633,
"count": 33
},
"Pyramids.Environment.CumulativeReward.sum": {
"value": 147.82879885286093,
"min": -30.628001749515533,
"max": 164.6503985375166,
"count": 33
},
"Pyramids.Policy.ExtrinsicReward.mean": {
"value": 1.6068347701397927,
"min": -1.0000000521540642,
"max": 1.6784081514544633,
"count": 33
},
"Pyramids.Policy.ExtrinsicReward.sum": {
"value": 147.82879885286093,
"min": -30.628001749515533,
"max": 164.6503985375166,
"count": 33
},
"Pyramids.Policy.RndReward.mean": {
"value": 0.04982187059795251,
"min": 0.04849430431649849,
"max": 7.057838655076921,
"count": 33
},
"Pyramids.Policy.RndReward.sum": {
"value": 4.583612095011631,
"min": 4.583612095011631,
"max": 112.92541848123074,
"count": 33
},
"Pyramids.IsTraining.mean": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 33
},
"Pyramids.IsTraining.sum": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 33
}
},
"metadata": {
"timer_format_version": "0.1.0",
"start_time_seconds": "1678846594",
"python_version": "3.9.16 (main, Dec 7 2022, 01:11:51) \n[GCC 9.4.0]",
"command_line_arguments": "/usr/local/bin/mlagents-learn ./config/ppo/PyramidsRND.yaml --env=./training-envs-executables/linux/Pyramids/Pyramids --run-id=Pyramids Training --no-graphics",
"mlagents_version": "0.31.0.dev0",
"mlagents_envs_version": "0.31.0.dev0",
"communication_protocol_version": "1.5.0",
"pytorch_version": "1.11.0+cu102",
"numpy_version": "1.21.2",
"end_time_seconds": "1678848857"
},
"total": 2262.63163545,
"count": 1,
"self": 0.4746320470003411,
"children": {
"run_training.setup": {
"total": 0.11723707600003763,
"count": 1,
"self": 0.11723707600003763
},
"TrainerController.start_learning": {
"total": 2262.0397663269996,
"count": 1,
"self": 1.5498802130141485,
"children": {
"TrainerController._reset_env": {
"total": 5.74632676300007,
"count": 1,
"self": 5.74632676300007
},
"TrainerController.advance": {
"total": 2254.6491039169855,
"count": 64031,
"self": 1.6656512050326455,
"children": {
"env_step": {
"total": 1628.362624779945,
"count": 64031,
"self": 1510.5086602418824,
"children": {
"SubprocessEnvManager._take_step": {
"total": 116.91132996601823,
"count": 64031,
"self": 5.171159316977082,
"children": {
"TorchPolicy.evaluate": {
"total": 111.74017064904115,
"count": 62573,
"self": 111.74017064904115
}
}
},
"workers": {
"total": 0.942634572044426,
"count": 64031,
"self": 0.0,
"children": {
"worker_root": {
"total": 2256.521349377988,
"count": 64031,
"is_parallel": true,
"self": 872.3767364309792,
"children": {
"run_training.setup": {
"total": 0.0,
"count": 0,
"is_parallel": true,
"self": 0.0,
"children": {
"steps_from_proto": {
"total": 0.002018566000060673,
"count": 1,
"is_parallel": true,
"self": 0.0006576529999620107,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.0013609130000986625,
"count": 8,
"is_parallel": true,
"self": 0.0013609130000986625
}
}
},
"UnityEnvironment.step": {
"total": 0.06041286800018497,
"count": 1,
"is_parallel": true,
"self": 0.0005688400001417904,
"children": {
"UnityEnvironment._generate_step_input": {
"total": 0.0005061509998540714,
"count": 1,
"is_parallel": true,
"self": 0.0005061509998540714
},
"communicator.exchange": {
"total": 0.05758007600002202,
"count": 1,
"is_parallel": true,
"self": 0.05758007600002202
},
"steps_from_proto": {
"total": 0.0017578010001670918,
"count": 1,
"is_parallel": true,
"self": 0.0004253219999554858,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.001332479000211606,
"count": 8,
"is_parallel": true,
"self": 0.001332479000211606
}
}
}
}
}
}
},
"UnityEnvironment.step": {
"total": 1384.1446129470087,
"count": 64030,
"is_parallel": true,
"self": 31.83002621810374,
"children": {
"UnityEnvironment._generate_step_input": {
"total": 24.13571891999254,
"count": 64030,
"is_parallel": true,
"self": 24.13571891999254
},
"communicator.exchange": {
"total": 1229.3480306939566,
"count": 64030,
"is_parallel": true,
"self": 1229.3480306939566
},
"steps_from_proto": {
"total": 98.83083711495578,
"count": 64030,
"is_parallel": true,
"self": 21.937287657998695,
"children": {
"_process_rank_one_or_two_observation": {
"total": 76.89354945695709,
"count": 512240,
"is_parallel": true,
"self": 76.89354945695709
}
}
}
}
}
}
}
}
}
}
},
"trainer_advance": {
"total": 624.6208279320081,
"count": 64031,
"self": 2.852424069007384,
"children": {
"process_trajectory": {
"total": 123.59656502499593,
"count": 64031,
"self": 123.387784206996,
"children": {
"RLTrainer._checkpoint": {
"total": 0.20878081799992287,
"count": 2,
"self": 0.20878081799992287
}
}
},
"_update_policy": {
"total": 498.17183883800476,
"count": 451,
"self": 315.46406643994874,
"children": {
"TorchPPOOptimizer.update": {
"total": 182.70777239805602,
"count": 22818,
"self": 182.70777239805602
}
}
}
}
}
}
},
"trainer_threads": {
"total": 1.2850000530306716e-06,
"count": 1,
"self": 1.2850000530306716e-06
},
"TrainerController._save_models": {
"total": 0.09445414899983007,
"count": 1,
"self": 0.0013746549998359114,
"children": {
"RLTrainer._checkpoint": {
"total": 0.09307949399999416,
"count": 1,
"self": 0.09307949399999416
}
}
}
}
}
}
}
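
The gauges and timer hierarchy above are plain JSON written by the ML-Agents trainer, so they can be inspected without any special tooling. Below is a minimal sketch, assuming the file has been downloaded locally as `timers.json` (the local filename is an assumption, not part of the log); it prints each recorded gauge and then walks the nested timer tree using only Python's standard `json` module.

```python
# Minimal sketch for inspecting an ML-Agents timers.json like the one above.
# Assumes the file was saved locally as "timers.json" (path is an assumption).
import json

with open("timers.json") as f:
    log = json.load(f)

# Each gauge entry carries value/min/max/count, as shown in the log above.
for name, gauge in log["gauges"].items():
    print(f"{name}: value={gauge['value']:.4f} "
          f"min={gauge['min']:.4f} max={gauge['max']:.4f} count={gauge['count']}")

def walk(node, name="root", depth=0):
    """Recursively print the timer tree: total seconds and call counts."""
    total = node.get("total", 0.0)
    count = node.get("count", 0)
    print(f"{'  ' * depth}{name}: {total:.2f}s over {count} call(s)")
    for child_name, child in node.get("children", {}).items():
        walk(child, child_name, depth + 1)

# The root node itself holds "total", "count", "self", and "children".
walk(log)
```

Running this reproduces the breakdown recorded above, e.g. that the roughly 2262 s of `TrainerController.start_learning` is dominated by `env_step` (mostly `communicator.exchange` with the Unity executable) and `_update_policy`.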