ppo-Huggy / run_logs / training_status.json
{
    "Huggy": {
        "checkpoints": [
            {
                "steps": 199991,
                "file_path": "results/Huggy/Huggy/Huggy-199991.onnx",
                "reward": 3.304852973012363,
                "creation_time": 1679024987.756942,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-199991.pt"
                ]
            },
            {
                "steps": 399912,
                "file_path": "results/Huggy/Huggy/Huggy-399912.onnx",
                "reward": 3.751525804616403,
                "creation_time": 1679025192.7422493,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-399912.pt"
                ]
            },
            {
                "steps": 599582,
                "file_path": "results/Huggy/Huggy/Huggy-599582.onnx",
                "reward": 3.627306356430054,
                "creation_time": 1679025400.8915334,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-599582.pt"
                ]
            },
            {
                "steps": 799973,
                "file_path": "results/Huggy/Huggy/Huggy-799973.onnx",
                "reward": 3.914375245226601,
                "creation_time": 1679025602.658259,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-799973.pt"
                ]
            },
            {
                "steps": 999896,
                "file_path": "results/Huggy/Huggy/Huggy-999896.onnx",
                "reward": 3.5022630059719084,
                "creation_time": 1679025804.5423453,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-999896.pt"
                ]
            },
            {
                "steps": 1199997,
                "file_path": "results/Huggy/Huggy/Huggy-1199997.onnx",
                "reward": 3.6808718195501364,
                "creation_time": 1679026004.0847492,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1199997.pt"
                ]
            },
            {
                "steps": 1399971,
                "file_path": "results/Huggy/Huggy/Huggy-1399971.onnx",
                "reward": 3.866328210155399,
                "creation_time": 1679026200.08231,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1399971.pt"
                ]
            },
            {
                "steps": 1599955,
                "file_path": "results/Huggy/Huggy/Huggy-1599955.onnx",
                "reward": 3.6719101919896073,
                "creation_time": 1679026400.8126333,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1599955.pt"
                ]
            },
            {
                "steps": 1799744,
                "file_path": "results/Huggy/Huggy/Huggy-1799744.onnx",
                "reward": 3.510413237282487,
                "creation_time": 1679026605.8914852,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1799744.pt"
                ]
            },
            {
                "steps": 1999472,
                "file_path": "results/Huggy/Huggy/Huggy-1999472.onnx",
                "reward": null,
                "creation_time": 1679026811.0783176,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1999472.pt"
                ]
            },
            {
                "steps": 2000222,
                "file_path": "results/Huggy/Huggy/Huggy-2000222.onnx",
                "reward": -2.946599245071411,
                "creation_time": 1679026811.2158098,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-2000222.pt"
                ]
            }
        ],
        "final_checkpoint": {
            "steps": 2000222,
            "file_path": "results/Huggy/Huggy.onnx",
            "reward": -2.946599245071411,
            "creation_time": 1679026811.2158098,
            "auxillary_file_paths": [
                "results/Huggy/Huggy/Huggy-2000222.pt"
            ]
        }
    },
    "metadata": {
        "stats_format_version": "0.3.0",
        "mlagents_version": "0.31.0.dev0",
        "torch_version": "1.11.0+cu102"
    }
}
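A minimal sketch of how this file can be inspected, assuming a local copy at run_logs/training_status.json (the path and script are illustrative, not part of the ML-Agents tooling); it only relies on the keys visible above, including the "auxillary_file_paths" spelling that ML-Agents itself writes:

```python
import json

# Load the ML-Agents training status log (adjust the path to your local copy).
with open("run_logs/training_status.json") as f:
    status = json.load(f)

# Print one line per saved checkpoint: step count, mean reward, ONNX path.
for ckpt in status["Huggy"]["checkpoints"]:
    reward = ckpt["reward"]
    reward_str = f"{reward:.3f}" if reward is not None else "n/a"
    print(f'{ckpt["steps"]:>8} steps  reward={reward_str}  -> {ckpt["file_path"]}')

# The final checkpoint is what gets exported as results/Huggy/Huggy.onnx.
final = status["Huggy"]["final_checkpoint"]
print(f'final: {final["steps"]} steps -> {final["file_path"]}')
```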