rl-ppo-Huggy/run_logs/training_status.json
{
    "Huggy": {
        "checkpoints": [
            {
                "steps": 199757,
                "file_path": "results/Huggy2/Huggy/Huggy-199757.onnx",
                "reward": 3.6011855513302247,
                "creation_time": 1710137369.998366,
                "auxillary_file_paths": [
                    "results/Huggy2/Huggy/Huggy-199757.pt"
                ]
            },
            {
                "steps": 399996,
                "file_path": "results/Huggy2/Huggy/Huggy-399996.onnx",
                "reward": 4.073628755716177,
                "creation_time": 1710137630.9276423,
                "auxillary_file_paths": [
                    "results/Huggy2/Huggy/Huggy-399996.pt"
                ]
            },
            {
                "steps": 599942,
                "file_path": "results/Huggy2/Huggy/Huggy-599942.onnx",
                "reward": 4.56474126302279,
                "creation_time": 1710137893.6625113,
                "auxillary_file_paths": [
                    "results/Huggy2/Huggy/Huggy-599942.pt"
                ]
            },
            {
                "steps": 799948,
                "file_path": "results/Huggy2/Huggy/Huggy-799948.onnx",
                "reward": 3.768518225186401,
                "creation_time": 1710138155.331324,
                "auxillary_file_paths": [
                    "results/Huggy2/Huggy/Huggy-799948.pt"
                ]
            },
            {
                "steps": 999960,
                "file_path": "results/Huggy2/Huggy/Huggy-999960.onnx",
                "reward": 3.776572366670377,
                "creation_time": 1710138421.3850737,
                "auxillary_file_paths": [
                    "results/Huggy2/Huggy/Huggy-999960.pt"
                ]
            },
            {
                "steps": 1199936,
                "file_path": "results/Huggy2/Huggy/Huggy-1199936.onnx",
                "reward": 3.940495392254421,
                "creation_time": 1710138687.6832957,
                "auxillary_file_paths": [
                    "results/Huggy2/Huggy/Huggy-1199936.pt"
                ]
            },
            {
                "steps": 1399960,
                "file_path": "results/Huggy2/Huggy/Huggy-1399960.onnx",
                "reward": 4.220977273854342,
                "creation_time": 1710138956.0775683,
                "auxillary_file_paths": [
                    "results/Huggy2/Huggy/Huggy-1399960.pt"
                ]
            },
            {
                "steps": 1599989,
                "file_path": "results/Huggy2/Huggy/Huggy-1599989.onnx",
                "reward": 4.081555638859865,
                "creation_time": 1710139220.012591,
                "auxillary_file_paths": [
                    "results/Huggy2/Huggy/Huggy-1599989.pt"
                ]
            },
            {
                "steps": 1799936,
                "file_path": "results/Huggy2/Huggy/Huggy-1799936.onnx",
                "reward": 3.8251642466866396,
                "creation_time": 1710139487.4294999,
                "auxillary_file_paths": [
                    "results/Huggy2/Huggy/Huggy-1799936.pt"
                ]
            },
            {
                "steps": 1999963,
                "file_path": "results/Huggy2/Huggy/Huggy-1999963.onnx",
                "reward": 3.8066644883155822,
                "creation_time": 1710139754.5984316,
                "auxillary_file_paths": [
                    "results/Huggy2/Huggy/Huggy-1999963.pt"
                ]
            },
            {
                "steps": 2000011,
                "file_path": "results/Huggy2/Huggy/Huggy-2000011.onnx",
                "reward": 3.7995614675011966,
                "creation_time": 1710139754.7285855,
                "auxillary_file_paths": [
                    "results/Huggy2/Huggy/Huggy-2000011.pt"
                ]
            }
        ],
        "final_checkpoint": {
            "steps": 2000011,
            "file_path": "results/Huggy2/Huggy.onnx",
            "reward": 3.7995614675011966,
            "creation_time": 1710139754.7285855,
            "auxillary_file_paths": [
                "results/Huggy2/Huggy/Huggy-2000011.pt"
            ]
        }
    },
    "metadata": {
        "stats_format_version": "0.3.0",
        "mlagents_version": "1.1.0.dev0",
        "torch_version": "2.2.1+cu121"
    }
}
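
Note: this file is the checkpoint bookkeeping that ML-Agents writes during training. Each entry under "checkpoints" records the step count, the exported .onnx policy, the mean reward at save time, and the companion .pt file listed under "auxillary_file_paths" (the key is spelled that way in the file itself). Below is a minimal sketch of how one might inspect it with the Python standard library; the script and its relative path to run_logs/training_status.json are illustrative assumptions, not part of this repository.

import json

# Assumed location of the file within the repo checkout; adjust if needed.
STATUS_FILE = "run_logs/training_status.json"

with open(STATUS_FILE) as f:
    status = json.load(f)

checkpoints = status["Huggy"]["checkpoints"]

# List every saved checkpoint with its step count and mean reward.
for ckpt in checkpoints:
    print(f"{ckpt['steps']:>9} steps  reward={ckpt['reward']:.3f}  {ckpt['file_path']}")

# Identify the checkpoint with the highest recorded reward.
best = max(checkpoints, key=lambda c: c["reward"])
print("best checkpoint:", best["file_path"], "reward:", best["reward"])

# The final exported policy and the tooling versions used for the run.
print("final policy:", status["Huggy"]["final_checkpoint"]["file_path"])
print("ml-agents version:", status["metadata"]["mlagents_version"])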