{
"Huggy": {
"checkpoints": [
{
"steps": 199789,
"file_path": "results/Huggy/Huggy/Huggy-199789.onnx",
"reward": 3.4495012778788805,
"creation_time": 1695271105.396678,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199789.pt"
]
},
{
"steps": 399865,
"file_path": "results/Huggy/Huggy/Huggy-399865.onnx",
"reward": 3.873215024885924,
"creation_time": 1695271292.3687196,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399865.pt"
]
},
{
"steps": 599943,
"file_path": "results/Huggy/Huggy/Huggy-599943.onnx",
"reward": 3.7201862523430274,
"creation_time": 1695271479.9978995,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599943.pt"
]
},
{
"steps": 799902,
"file_path": "results/Huggy/Huggy/Huggy-799902.onnx",
"reward": 3.8381369953016633,
"creation_time": 1695271666.438609,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799902.pt"
]
},
{
"steps": 999941,
"file_path": "results/Huggy/Huggy/Huggy-999941.onnx",
"reward": 3.9633996586763223,
"creation_time": 1695271856.8638482,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999941.pt"
]
},
{
"steps": 1199972,
"file_path": "results/Huggy/Huggy/Huggy-1199972.onnx",
"reward": 4.189462208747864,
"creation_time": 1695272048.017048,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199972.pt"
]
},
{
"steps": 1399818,
"file_path": "results/Huggy/Huggy/Huggy-1399818.onnx",
"reward": 3.7545072063803673,
"creation_time": 1695272238.9214656,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399818.pt"
]
},
{
"steps": 1599953,
"file_path": "results/Huggy/Huggy/Huggy-1599953.onnx",
"reward": 4.0665554078064154,
"creation_time": 1695272426.6422243,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599953.pt"
]
},
{
"steps": 1799995,
"file_path": "results/Huggy/Huggy/Huggy-1799995.onnx",
"reward": 3.877195700522392,
"creation_time": 1695272618.2291312,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799995.pt"
]
},
{
"steps": 1999963,
"file_path": "results/Huggy/Huggy/Huggy-1999963.onnx",
"reward": 3.930140233876412,
"creation_time": 1695272809.0443783,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999963.pt"
]
},
{
"steps": 2000043,
"file_path": "results/Huggy/Huggy/Huggy-2000043.onnx",
"reward": 3.9611143798663697,
"creation_time": 1695272809.1576366,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000043.pt"
]
}
],
"final_checkpoint": {
"steps": 2000043,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 3.9611143798663697,
"creation_time": 1695272809.1576366,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000043.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "0.31.0.dev0",
"torch_version": "1.11.0+cu102"
}
}