{
"Huggy": {
"checkpoints": [
{
"steps": 199966,
"file_path": "results/Huggy2/Huggy/Huggy-199966.onnx",
"reward": 3.1632906430727474,
"creation_time": 1740130766.2121215,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-199966.pt"
]
},
{
"steps": 399980,
"file_path": "results/Huggy2/Huggy/Huggy-399980.onnx",
"reward": 3.687236322598024,
"creation_time": 1740131000.9849665,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-399980.pt"
]
},
{
"steps": 599927,
"file_path": "results/Huggy2/Huggy/Huggy-599927.onnx",
"reward": 3.8285841579022617,
"creation_time": 1740131236.421214,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-599927.pt"
]
},
{
"steps": 799894,
"file_path": "results/Huggy2/Huggy/Huggy-799894.onnx",
"reward": 3.6661705311062267,
"creation_time": 1740131470.4026275,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-799894.pt"
]
},
{
"steps": 999956,
"file_path": "results/Huggy2/Huggy/Huggy-999956.onnx",
"reward": 3.7313776077145207,
"creation_time": 1740131712.2245605,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-999956.pt"
]
},
{
"steps": 1199997,
"file_path": "results/Huggy2/Huggy/Huggy-1199997.onnx",
"reward": 3.728529390406935,
"creation_time": 1740131949.1728148,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1199997.pt"
]
},
{
"steps": 1399990,
"file_path": "results/Huggy2/Huggy/Huggy-1399990.onnx",
"reward": 3.9371138413747153,
"creation_time": 1740132194.6125207,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1399990.pt"
]
},
{
"steps": 1599970,
"file_path": "results/Huggy2/Huggy/Huggy-1599970.onnx",
"reward": 3.881000762864163,
"creation_time": 1740132443.147673,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1599970.pt"
]
},
{
"steps": 1799892,
"file_path": "results/Huggy2/Huggy/Huggy-1799892.onnx",
"reward": 3.582197025174,
"creation_time": 1740132689.3343406,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1799892.pt"
]
},
{
"steps": 1999955,
"file_path": "results/Huggy2/Huggy/Huggy-1999955.onnx",
"reward": 3.0924666285514832,
"creation_time": 1740132928.033056,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1999955.pt"
]
},
{
"steps": 2000231,
"file_path": "results/Huggy2/Huggy/Huggy-2000231.onnx",
"reward": 3.2829319275915623,
"creation_time": 1740132928.170583,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000231.pt"
]
}
],
"final_checkpoint": {
"steps": 2000231,
"file_path": "results/Huggy2/Huggy.onnx",
"reward": 3.2829319275915623,
"creation_time": 1740132928.170583,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000231.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.2.0.dev0",
"torch_version": "2.6.0+cu124"
}
}