{
"Huggy": {
"checkpoints": [
{
"steps": 199950,
"file_path": "results/Huggy/Huggy/Huggy-199950.onnx",
"reward": 3.4801060680080864,
"creation_time": 1682188917.4233956,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199950.pt"
]
},
{
"steps": 399909,
"file_path": "results/Huggy/Huggy/Huggy-399909.onnx",
"reward": 3.8274477927221193,
"creation_time": 1682189143.5494869,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399909.pt"
]
},
{
"steps": 599971,
"file_path": "results/Huggy/Huggy/Huggy-599971.onnx",
"reward": 3.808477188859667,
"creation_time": 1682189373.9857087,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599971.pt"
]
},
{
"steps": 799357,
"file_path": "results/Huggy/Huggy/Huggy-799357.onnx",
"reward": 3.6556606931905997,
"creation_time": 1682189601.5974753,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799357.pt"
]
},
{
"steps": 999915,
"file_path": "results/Huggy/Huggy/Huggy-999915.onnx",
"reward": 3.8179806428296224,
"creation_time": 1682189835.3692493,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999915.pt"
]
},
{
"steps": 1199985,
"file_path": "results/Huggy/Huggy/Huggy-1199985.onnx",
"reward": 3.801130065412232,
"creation_time": 1682190068.3768678,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199985.pt"
]
},
{
"steps": 1399951,
"file_path": "results/Huggy/Huggy/Huggy-1399951.onnx",
"reward": 3.7761644551920335,
"creation_time": 1682190296.7333267,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399951.pt"
]
},
{
"steps": 1599989,
"file_path": "results/Huggy/Huggy/Huggy-1599989.onnx",
"reward": 3.7662664023312655,
"creation_time": 1682190529.3453946,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599989.pt"
]
},
{
"steps": 1799955,
"file_path": "results/Huggy/Huggy/Huggy-1799955.onnx",
"reward": 3.554974284127494,
"creation_time": 1682190762.3187444,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799955.pt"
]
},
{
"steps": 1999967,
"file_path": "results/Huggy/Huggy/Huggy-1999967.onnx",
"reward": 3.77506589392821,
"creation_time": 1682190994.4051378,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999967.pt"
]
},
{
"steps": 2000092,
"file_path": "results/Huggy/Huggy/Huggy-2000092.onnx",
"reward": 3.889153017997742,
"creation_time": 1682190994.5288558,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000092.pt"
]
}
],
"final_checkpoint": {
"steps": 2000092,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 3.889153017997742,
"creation_time": 1682190994.5288558,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000092.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "0.31.0.dev0",
"torch_version": "1.11.0+cu102"
}
}