ppo-Pyramids/run_logs/timers.json
{
"name": "root",
"gauges": {
"Pyramids.Policy.Entropy.mean": {
"value": 0.537469208240509,
"min": 0.537469208240509,
"max": 1.4602720737457275,
"count": 33
},
"Pyramids.Policy.Entropy.sum": {
"value": 16081.078125,
"min": 16081.078125,
"max": 44298.8125,
"count": 33
},
"Pyramids.Step.mean": {
"value": 989939.0,
"min": 29952.0,
"max": 989939.0,
"count": 33
},
"Pyramids.Step.sum": {
"value": 989939.0,
"min": 29952.0,
"max": 989939.0,
"count": 33
},
"Pyramids.Policy.ExtrinsicValueEstimate.mean": {
"value": 0.5477339625358582,
"min": -0.1155085414648056,
"max": 0.5477339625358582,
"count": 33
},
"Pyramids.Policy.ExtrinsicValueEstimate.sum": {
"value": 152.81777954101562,
"min": -27.953067779541016,
"max": 152.81777954101562,
"count": 33
},
"Pyramids.Policy.RndValueEstimate.mean": {
"value": 0.007286582607775927,
"min": -0.03917496278882027,
"max": 0.2708534896373749,
"count": 33
},
"Pyramids.Policy.RndValueEstimate.sum": {
"value": 2.032956600189209,
"min": -10.381364822387695,
"max": 65.54654693603516,
"count": 33
},
"Pyramids.Losses.PolicyLoss.mean": {
"value": 0.06870663790600484,
"min": 0.06585085634448688,
"max": 0.07362447643685692,
"count": 33
},
"Pyramids.Losses.PolicyLoss.sum": {
"value": 0.9618929306840678,
"min": 0.511078340184725,
"max": 1.0547077266268852,
"count": 33
},
"Pyramids.Losses.ValueLoss.mean": {
"value": 0.01502501104481607,
"min": 0.0006209250926686348,
"max": 0.01502501104481607,
"count": 33
},
"Pyramids.Losses.ValueLoss.sum": {
"value": 0.21035015462742498,
"min": 0.008692951297360888,
"max": 0.21035015462742498,
"count": 33
},
"Pyramids.Policy.LearningRate.mean": {
"value": 7.384747538450001e-06,
"min": 7.384747538450001e-06,
"max": 0.00029515063018788575,
"count": 33
},
"Pyramids.Policy.LearningRate.sum": {
"value": 0.00010338646553830001,
"min": 0.00010338646553830001,
"max": 0.0036332071889310005,
"count": 33
},
"Pyramids.Policy.Epsilon.mean": {
"value": 0.10246155000000001,
"min": 0.10246155000000001,
"max": 0.19838354285714285,
"count": 33
},
"Pyramids.Policy.Epsilon.sum": {
"value": 1.4344617000000002,
"min": 1.3886848,
"max": 2.6110689999999996,
"count": 33
},
"Pyramids.Policy.Beta.mean": {
"value": 0.00025590884500000015,
"min": 0.00025590884500000015,
"max": 0.00983851593142857,
"count": 33
},
"Pyramids.Policy.Beta.sum": {
"value": 0.0035827238300000022,
"min": 0.0035827238300000022,
"max": 0.1211257931,
"count": 33
},
"Pyramids.Losses.RNDLoss.mean": {
"value": 0.011887529864907265,
"min": 0.01175712700933218,
"max": 0.40382641553878784,
"count": 33
},
"Pyramids.Losses.RNDLoss.sum": {
"value": 0.166425421833992,
"min": 0.16459977626800537,
"max": 2.82678484916687,
"count": 33
},
"Pyramids.Environment.EpisodeLength.mean": {
"value": 343.22222222222223,
"min": 343.22222222222223,
"max": 999.0,
"count": 33
},
"Pyramids.Environment.EpisodeLength.sum": {
"value": 30890.0,
"min": 15984.0,
"max": 32989.0,
"count": 33
},
"Pyramids.Environment.CumulativeReward.mean": {
"value": 1.5355888722671402,
"min": -1.0000000521540642,
"max": 1.5395487013917704,
"count": 33
},
"Pyramids.Environment.CumulativeReward.sum": {
"value": 138.20299850404263,
"min": -28.197001732885838,
"max": 138.20299850404263,
"count": 33
},
"Pyramids.Policy.ExtrinsicReward.mean": {
"value": 1.5355888722671402,
"min": -1.0000000521540642,
"max": 1.5395487013917704,
"count": 33
},
"Pyramids.Policy.ExtrinsicReward.sum": {
"value": 138.20299850404263,
"min": -28.197001732885838,
"max": 138.20299850404263,
"count": 33
},
"Pyramids.Policy.RndReward.mean": {
"value": 0.04327827774638233,
"min": 0.04327827774638233,
"max": 7.453175722621381,
"count": 33
},
"Pyramids.Policy.RndReward.sum": {
"value": 3.895044997174409,
"min": 3.4759267813933548,
"max": 119.2508115619421,
"count": 33
},
"Pyramids.IsTraining.mean": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 33
},
"Pyramids.IsTraining.sum": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 33
}
},
"metadata": {
"timer_format_version": "0.1.0",
"start_time_seconds": "1698956229",
"python_version": "3.10.12 (main, Jun 11 2023, 05:26:28) [GCC 11.4.0]",
"command_line_arguments": "/usr/local/bin/mlagents-learn ./config/ppo/PyramidsRND.yaml --env=./training-envs-executables/linux/Pyramids/Pyramids --run-id=Pyramids Training --no-graphics",
"mlagents_version": "1.1.0.dev0",
"mlagents_envs_version": "1.1.0.dev0",
"communication_protocol_version": "1.5.0",
"pytorch_version": "2.1.0+cu118",
"numpy_version": "1.23.5",
"end_time_seconds": "1698957682"
},
"total": 1453.056970418,
"count": 1,
"self": 0.371980210999709,
"children": {
"run_training.setup": {
"total": 0.04032402200004981,
"count": 1,
"self": 0.04032402200004981
},
"TrainerController.start_learning": {
"total": 1452.6446661850002,
"count": 1,
"self": 1.0569437970113995,
"children": {
"TrainerController._reset_env": {
"total": 3.385779236000076,
"count": 1,
"self": 3.385779236000076
},
"TrainerController.advance": {
"total": 1448.1311863409892,
"count": 63634,
"self": 1.0902291411184706,
"children": {
"env_step": {
"total": 935.0803603578981,
"count": 63634,
"self": 829.4690710919295,
"children": {
"SubprocessEnvManager._take_step": {
"total": 104.96687454795483,
"count": 63634,
"self": 3.8377414529923044,
"children": {
"TorchPolicy.evaluate": {
"total": 101.12913309496253,
"count": 62561,
"self": 101.12913309496253
}
}
},
"workers": {
"total": 0.6444147180137634,
"count": 63634,
"self": 0.0,
"children": {
"worker_root": {
"total": 1450.8360712980016,
"count": 63634,
"is_parallel": true,
"self": 702.0642350619721,
"children": {
"run_training.setup": {
"total": 0.0,
"count": 0,
"is_parallel": true,
"self": 0.0,
"children": {
"steps_from_proto": {
"total": 0.0018041579999135138,
"count": 1,
"is_parallel": true,
"self": 0.0005561049997595546,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.0012480530001539591,
"count": 8,
"is_parallel": true,
"self": 0.0012480530001539591
}
}
},
"UnityEnvironment.step": {
"total": 0.03364381200003663,
"count": 1,
"is_parallel": true,
"self": 0.00033310100013750343,
"children": {
"UnityEnvironment._generate_step_input": {
"total": 0.0002834050000046773,
"count": 1,
"is_parallel": true,
"self": 0.0002834050000046773
},
"communicator.exchange": {
"total": 0.03195716999994147,
"count": 1,
"is_parallel": true,
"self": 0.03195716999994147
},
"steps_from_proto": {
"total": 0.0010701359999529814,
"count": 1,
"is_parallel": true,
"self": 0.0002338089998374926,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.0008363270001154888,
"count": 8,
"is_parallel": true,
"self": 0.0008363270001154888
}
}
}
}
}
}
},
"UnityEnvironment.step": {
"total": 748.7718362360295,
"count": 63633,
"is_parallel": true,
"self": 19.895991991093297,
"children": {
"UnityEnvironment._generate_step_input": {
"total": 13.851949299016155,
"count": 63633,
"is_parallel": true,
"self": 13.851949299016155
},
"communicator.exchange": {
"total": 658.4641740669387,
"count": 63633,
"is_parallel": true,
"self": 658.4641740669387
},
"steps_from_proto": {
"total": 56.55972087898135,
"count": 63633,
"is_parallel": true,
"self": 11.945073903034427,
"children": {
"_process_rank_one_or_two_observation": {
"total": 44.614646975946926,
"count": 509064,
"is_parallel": true,
"self": 44.614646975946926
}
}
}
}
}
}
}
}
}
}
},
"trainer_advance": {
"total": 511.9605968419727,
"count": 63634,
"self": 2.0591762999576986,
"children": {
"process_trajectory": {
"total": 97.112013756014,
"count": 63634,
"self": 96.94710044301405,
"children": {
"RLTrainer._checkpoint": {
"total": 0.16491331299994272,
"count": 2,
"self": 0.16491331299994272
}
}
},
"_update_policy": {
"total": 412.789406786001,
"count": 453,
"self": 239.33156542198867,
"children": {
"TorchPPOOptimizer.update": {
"total": 173.45784136401232,
"count": 22809,
"self": 173.45784136401232
}
}
}
}
}
}
},
"trainer_threads": {
"total": 8.869997145666275e-07,
"count": 1,
"self": 8.869997145666275e-07
},
"TrainerController._save_models": {
"total": 0.07075592399996822,
"count": 1,
"self": 0.0013630660000671924,
"children": {
"RLTrainer._checkpoint": {
"total": 0.06939285799990103,
"count": 1,
"self": 0.06939285799990103
}
}
}
}
}
}
}