{
"name": "root",
"gauges": {
"Pyramids.Policy.Entropy.mean": {
"value": 0.49696850776672363,
"min": 0.49696850776672363,
"max": 1.4081650972366333,
"count": 33
},
"Pyramids.Policy.Entropy.sum": {
"value": 14901.103515625,
"min": 14901.103515625,
"max": 42718.09765625,
"count": 33
},
"Pyramids.Step.mean": {
"value": 989980.0,
"min": 29952.0,
"max": 989980.0,
"count": 33
},
"Pyramids.Step.sum": {
"value": 989980.0,
"min": 29952.0,
"max": 989980.0,
"count": 33
},
"Pyramids.Policy.ExtrinsicValueEstimate.mean": {
"value": 0.41353651881217957,
"min": -0.11066388338804245,
"max": 0.4502978026866913,
"count": 33
},
"Pyramids.Policy.ExtrinsicValueEstimate.sum": {
"value": 110.82778930664062,
"min": -26.66999626159668,
"max": 122.48100280761719,
"count": 33
},
"Pyramids.Policy.RndValueEstimate.mean": {
"value": 0.002935063326731324,
"min": -0.01595137268304825,
"max": 0.5726882219314575,
"count": 33
},
"Pyramids.Policy.RndValueEstimate.sum": {
"value": 0.786596953868866,
"min": -4.019745826721191,
"max": 135.72711181640625,
"count": 33
},
"Pyramids.Losses.PolicyLoss.mean": {
"value": 0.06554523098886766,
"min": 0.06518754058569257,
"max": 0.07485660904604333,
"count": 33
},
"Pyramids.Losses.PolicyLoss.sum": {
"value": 0.9176332338441473,
"min": 0.5178103298848715,
"max": 1.0494812746862106,
"count": 33
},
"Pyramids.Losses.ValueLoss.mean": {
"value": 0.014787017050929578,
"min": 0.0004327237056574881,
"max": 0.016972040224027807,
"count": 33
},
"Pyramids.Losses.ValueLoss.sum": {
"value": 0.20701823871301409,
"min": 0.005192684467889857,
"max": 0.23760856313638928,
"count": 33
},
"Pyramids.Policy.LearningRate.mean": {
"value": 7.670411728942855e-06,
"min": 7.670411728942855e-06,
"max": 0.00029515063018788575,
"count": 33
},
"Pyramids.Policy.LearningRate.sum": {
"value": 0.00010738576420519998,
"min": 0.00010738576420519998,
"max": 0.0033310932896355996,
"count": 33
},
"Pyramids.Policy.Epsilon.mean": {
"value": 0.10255677142857145,
"min": 0.10255677142857145,
"max": 0.19838354285714285,
"count": 33
},
"Pyramids.Policy.Epsilon.sum": {
"value": 1.4357948000000003,
"min": 1.3886848,
"max": 2.4431847999999996,
"count": 33
},
"Pyramids.Policy.Beta.mean": {
"value": 0.0002654214657142857,
"min": 0.0002654214657142857,
"max": 0.00983851593142857,
"count": 33
},
"Pyramids.Policy.Beta.sum": {
"value": 0.00371590052,
"min": 0.00371590052,
"max": 0.11104540355999999,
"count": 33
},
"Pyramids.Losses.RNDLoss.mean": {
"value": 0.00718091893941164,
"min": 0.00718091893941164,
"max": 0.5711057782173157,
"count": 33
},
"Pyramids.Losses.RNDLoss.sum": {
"value": 0.10053286701440811,
"min": 0.10053286701440811,
"max": 3.9977405071258545,
"count": 33
},
"Pyramids.Environment.EpisodeLength.mean": {
"value": 448.6714285714286,
"min": 379.1333333333333,
"max": 999.0,
"count": 33
},
"Pyramids.Environment.EpisodeLength.sum": {
"value": 31407.0,
"min": 15984.0,
"max": 32910.0,
"count": 33
},
"Pyramids.Environment.CumulativeReward.mean": {
"value": 1.3853246083726054,
"min": -1.0000000521540642,
"max": 1.5941866394877433,
"count": 33
},
"Pyramids.Environment.CumulativeReward.sum": {
"value": 95.58739797770977,
"min": -30.415001675486565,
"max": 119.56399796158075,
"count": 33
},
"Pyramids.Policy.ExtrinsicReward.mean": {
"value": 1.3853246083726054,
"min": -1.0000000521540642,
"max": 1.5941866394877433,
"count": 33
},
"Pyramids.Policy.ExtrinsicReward.sum": {
"value": 95.58739797770977,
"min": -30.415001675486565,
"max": 119.56399796158075,
"count": 33
},
"Pyramids.Policy.RndReward.mean": {
"value": 0.03307373915837767,
"min": 0.031789679408878634,
"max": 12.326904576271772,
"count": 33
},
"Pyramids.Policy.RndReward.sum": {
"value": 2.2820880019280594,
"min": 2.2820880019280594,
"max": 197.23047322034836,
"count": 33
},
"Pyramids.IsTraining.mean": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 33
},
"Pyramids.IsTraining.sum": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 33
}
},
"metadata": {
"timer_format_version": "0.1.0",
"start_time_seconds": "1692259243",
"python_version": "3.10.12 (main, Jun 11 2023, 05:26:28) [GCC 11.4.0]",
"command_line_arguments": "/usr/local/bin/mlagents-learn ./config/ppo/PyramidsRND.yaml --env=./training-envs-executables/linux/Pyramids/Pyramids --run-id=Pyramids Training --no-graphics --force",
"mlagents_version": "0.31.0.dev0",
"mlagents_envs_version": "0.31.0.dev0",
"communication_protocol_version": "1.5.0",
"pytorch_version": "1.11.0+cu102",
"numpy_version": "1.21.2",
"end_time_seconds": "1692262095"
},
"total": 2851.622209957,
"count": 1,
"self": 0.5414843069997914,
"children": {
"run_training.setup": {
"total": 0.04826297400001067,
"count": 1,
"self": 0.04826297400001067
},
"TrainerController.start_learning": {
"total": 2851.0324626760003,
"count": 1,
"self": 2.099993585935863,
"children": {
"TrainerController._reset_env": {
"total": 5.999311329000022,
"count": 1,
"self": 5.999311329000022
},
"TrainerController.advance": {
"total": 2842.8192059720645,
"count": 63655,
"self": 2.32555538799852,
"children": {
"env_step": {
"total": 2071.0581755320277,
"count": 63655,
"self": 1909.1592903150497,
"children": {
"SubprocessEnvManager._take_step": {
"total": 160.5613945229868,
"count": 63655,
"self": 6.6033532399659975,
"children": {
"TorchPolicy.evaluate": {
"total": 153.9580412830208,
"count": 62556,
"self": 153.9580412830208
}
}
},
"workers": {
"total": 1.3374906939911284,
"count": 63655,
"self": 0.0,
"children": {
"worker_root": {
"total": 2843.4388888310273,
"count": 63655,
"is_parallel": true,
"self": 1102.4766411729968,
"children": {
"run_training.setup": {
"total": 0.0,
"count": 0,
"is_parallel": true,
"self": 0.0,
"children": {
"steps_from_proto": {
"total": 0.008791168000016114,
"count": 1,
"is_parallel": true,
"self": 0.006183483999961936,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.002607684000054178,
"count": 8,
"is_parallel": true,
"self": 0.002607684000054178
}
}
},
"UnityEnvironment.step": {
"total": 0.05979317000003448,
"count": 1,
"is_parallel": true,
"self": 0.0007414430000949324,
"children": {
"UnityEnvironment._generate_step_input": {
"total": 0.0005974029999720187,
"count": 1,
"is_parallel": true,
"self": 0.0005974029999720187
},
"communicator.exchange": {
"total": 0.056023148000008405,
"count": 1,
"is_parallel": true,
"self": 0.056023148000008405
},
"steps_from_proto": {
"total": 0.002431175999959123,
"count": 1,
"is_parallel": true,
"self": 0.00046840100003464613,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.001962774999924477,
"count": 8,
"is_parallel": true,
"self": 0.001962774999924477
}
}
}
}
}
}
},
"UnityEnvironment.step": {
"total": 1740.9622476580305,
"count": 63654,
"is_parallel": true,
"self": 45.647660876142936,
"children": {
"UnityEnvironment._generate_step_input": {
"total": 30.185809399966047,
"count": 63654,
"is_parallel": true,
"self": 30.185809399966047
},
"communicator.exchange": {
"total": 1524.5041797599793,
"count": 63654,
"is_parallel": true,
"self": 1524.5041797599793
},
"steps_from_proto": {
"total": 140.62459762194226,
"count": 63654,
"is_parallel": true,
"self": 29.268426549997628,
"children": {
"_process_rank_one_or_two_observation": {
"total": 111.35617107194463,
"count": 509232,
"is_parallel": true,
"self": 111.35617107194463
}
}
}
}
}
}
}
}
}
}
},
"trainer_advance": {
"total": 769.4354750520382,
"count": 63655,
"self": 4.156754031018977,
"children": {
"process_trajectory": {
"total": 139.59140136401396,
"count": 63655,
"self": 139.3344362590139,
"children": {
"RLTrainer._checkpoint": {
"total": 0.256965105000063,
"count": 2,
"self": 0.256965105000063
}
}
},
"_update_policy": {
"total": 625.6873196570052,
"count": 449,
"self": 404.4704455540059,
"children": {
"TorchPPOOptimizer.update": {
"total": 221.21687410299933,
"count": 22821,
"self": 221.21687410299933
}
}
}
}
}
}
},
"trainer_threads": {
"total": 1.216999862663215e-06,
"count": 1,
"self": 1.216999862663215e-06
},
"TrainerController._save_models": {
"total": 0.11395057199979419,
"count": 1,
"self": 0.0017667850001998886,
"children": {
"RLTrainer._checkpoint": {
"total": 0.1121837869995943,
"count": 1,
"self": 0.1121837869995943
}
}
}
}
}
}
}