{
"Huggy": {
"checkpoints": [
{
"steps": 199832,
"file_path": "results/Huggy/Huggy/Huggy-199832.onnx",
"reward": 3.529147231187977,
"creation_time": 1681247190.905254,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199832.pt"
]
},
{
"steps": 399895,
"file_path": "results/Huggy/Huggy/Huggy-399895.onnx",
"reward": 3.64179541903027,
"creation_time": 1681247430.024868,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399895.pt"
]
},
{
"steps": 599953,
"file_path": "results/Huggy/Huggy/Huggy-599953.onnx",
"reward": 3.5623111999951877,
"creation_time": 1681247670.0892768,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599953.pt"
]
},
{
"steps": 799934,
"file_path": "results/Huggy/Huggy/Huggy-799934.onnx",
"reward": 3.6893555335471966,
"creation_time": 1681247911.3037124,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799934.pt"
]
},
{
"steps": 999973,
"file_path": "results/Huggy/Huggy/Huggy-999973.onnx",
"reward": 3.777954240976754,
"creation_time": 1681248156.1492088,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999973.pt"
]
},
{
"steps": 1199949,
"file_path": "results/Huggy/Huggy/Huggy-1199949.onnx",
"reward": 3.639417370160421,
"creation_time": 1681248394.694253,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199949.pt"
]
},
{
"steps": 1399518,
"file_path": "results/Huggy/Huggy/Huggy-1399518.onnx",
"reward": 1.6784121990203857,
"creation_time": 1681248633.2034,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399518.pt"
]
},
{
"steps": 1599600,
"file_path": "results/Huggy/Huggy/Huggy-1599600.onnx",
"reward": 3.969075033147084,
"creation_time": 1681248870.107755,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599600.pt"
]
},
{
"steps": 1799926,
"file_path": "results/Huggy/Huggy/Huggy-1799926.onnx",
"reward": 3.731092257425189,
"creation_time": 1681249104.997044,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799926.pt"
]
},
{
"steps": 1999976,
"file_path": "results/Huggy/Huggy/Huggy-1999976.onnx",
"reward": 3.741775224606196,
"creation_time": 1681249346.1133494,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999976.pt"
]
},
{
"steps": 2000094,
"file_path": "results/Huggy/Huggy/Huggy-2000094.onnx",
"reward": 3.8183592319488526,
"creation_time": 1681249346.2360132,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000094.pt"
]
}
],
"final_checkpoint": {
"steps": 2000094,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 3.8183592319488526,
"creation_time": 1681249346.2360132,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000094.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "0.31.0.dev0",
"torch_version": "1.11.0+cu102"
}
}