{
  "Huggy": {
    "checkpoints": [
      {
        "steps": 199711,
        "file_path": "results/Huggy/Huggy/Huggy-199711.onnx",
        "reward": 3.2171853225806664,
        "creation_time": 1696459064.0061982,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-199711.pt"
        ]
      },
      {
        "steps": 399945,
        "file_path": "results/Huggy/Huggy/Huggy-399945.onnx",
        "reward": 3.6428364269317144,
        "creation_time": 1696459327.9795945,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-399945.pt"
        ]
      },
      {
        "steps": 599866,
        "file_path": "results/Huggy/Huggy/Huggy-599866.onnx",
        "reward": 4.448782426970346,
        "creation_time": 1696459590.005195,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-599866.pt"
        ]
      },
      {
        "steps": 799967,
        "file_path": "results/Huggy/Huggy/Huggy-799967.onnx",
        "reward": 3.8496902691569304,
        "creation_time": 1696459855.773914,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-799967.pt"
        ]
      },
      {
        "steps": 999956,
        "file_path": "results/Huggy/Huggy/Huggy-999956.onnx",
        "reward": 3.8893741657809606,
        "creation_time": 1696460124.699484,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-999956.pt"
        ]
      },
      {
        "steps": 1199936,
        "file_path": "results/Huggy/Huggy/Huggy-1199936.onnx",
        "reward": 3.6872841342171627,
        "creation_time": 1696460395.736722,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-1199936.pt"
        ]
      },
      {
        "steps": 1399938,
        "file_path": "results/Huggy/Huggy/Huggy-1399938.onnx",
        "reward": 3.8518443509232845,
        "creation_time": 1696460660.3390343,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-1399938.pt"
        ]
      },
      {
        "steps": 1599597,
        "file_path": "results/Huggy/Huggy/Huggy-1599597.onnx",
        "reward": 3.807038167119026,
        "creation_time": 1696460930.7197814,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-1599597.pt"
        ]
      },
      {
        "steps": 1799789,
        "file_path": "results/Huggy/Huggy/Huggy-1799789.onnx",
        "reward": 3.835959889270641,
        "creation_time": 1696461192.161119,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-1799789.pt"
        ]
      },
      {
        "steps": 1999932,
        "file_path": "results/Huggy/Huggy/Huggy-1999932.onnx",
        "reward": 3.1917334124445915,
        "creation_time": 1696461449.6532042,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-1999932.pt"
        ]
      },
      {
        "steps": 2000053,
        "file_path": "results/Huggy/Huggy/Huggy-2000053.onnx",
        "reward": 3.3106040042989395,
        "creation_time": 1696461449.7713597,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-2000053.pt"
        ]
      }
    ],
    "final_checkpoint": {
      "steps": 2000053,
      "file_path": "results/Huggy/Huggy.onnx",
      "reward": 3.3106040042989395,
      "creation_time": 1696461449.7713597,
      "auxillary_file_paths": [
        "results/Huggy/Huggy/Huggy-2000053.pt"
      ]
    }
  },
  "metadata": {
    "stats_format_version": "0.3.0",
    "mlagents_version": "0.31.0.dev0",
    "torch_version": "1.11.0+cu102"
  }
}