{
"Huggy": {
"checkpoints": [
{
"steps": 199983,
"file_path": "results/Huggy/Huggy/Huggy-199983.onnx",
"reward": 3.3457605759302775,
"creation_time": 1679690909.3011253,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199983.pt"
]
},
{
"steps": 399889,
"file_path": "results/Huggy/Huggy/Huggy-399889.onnx",
"reward": 3.744452214241028,
"creation_time": 1679691154.1306283,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399889.pt"
]
},
{
"steps": 599942,
"file_path": "results/Huggy/Huggy/Huggy-599942.onnx",
"reward": 3.598223002893584,
"creation_time": 1679691399.6091504,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599942.pt"
]
},
{
"steps": 799970,
"file_path": "results/Huggy/Huggy/Huggy-799970.onnx",
"reward": 3.851994901384626,
"creation_time": 1679691644.1959786,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799970.pt"
]
},
{
"steps": 999960,
"file_path": "results/Huggy/Huggy/Huggy-999960.onnx",
"reward": 4.011924490884498,
"creation_time": 1679691889.5424368,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999960.pt"
]
},
{
"steps": 1199976,
"file_path": "results/Huggy/Huggy/Huggy-1199976.onnx",
"reward": 3.699399713313941,
"creation_time": 1679692135.9807248,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199976.pt"
]
},
{
"steps": 1399929,
"file_path": "results/Huggy/Huggy/Huggy-1399929.onnx",
"reward": 3.70279788609707,
"creation_time": 1679692373.3203995,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399929.pt"
]
},
{
"steps": 1599966,
"file_path": "results/Huggy/Huggy/Huggy-1599966.onnx",
"reward": 3.497054300374455,
"creation_time": 1679692611.95537,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599966.pt"
]
},
{
"steps": 1799929,
"file_path": "results/Huggy/Huggy/Huggy-1799929.onnx",
"reward": 3.818619439768237,
"creation_time": 1679692849.2364964,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799929.pt"
]
},
{
"steps": 1999992,
"file_path": "results/Huggy/Huggy/Huggy-1999992.onnx",
"reward": 3.8084478278954825,
"creation_time": 1679693085.8297596,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999992.pt"
]
},
{
"steps": 2000016,
"file_path": "results/Huggy/Huggy/Huggy-2000016.onnx",
"reward": 3.7090988826751707,
"creation_time": 1679693086.0081086,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000016.pt"
]
}
],
"final_checkpoint": {
"steps": 2000016,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 3.7090988826751707,
"creation_time": 1679693086.0081086,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000016.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "0.31.0.dev0",
"torch_version": "1.11.0+cu102"
}
}