{
"Huggy": {
"checkpoints": [
{
"steps": 199967,
"file_path": "results/Huggy/Huggy/Huggy-199967.onnx",
"reward": 3.4278482620532698,
"creation_time": 1672941272.5686557,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199967.pt"
]
},
{
"steps": 399869,
"file_path": "results/Huggy/Huggy/Huggy-399869.onnx",
"reward": 4.177074774256292,
"creation_time": 1672941509.6861467,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399869.pt"
]
},
{
"steps": 599952,
"file_path": "results/Huggy/Huggy/Huggy-599952.onnx",
"reward": 3.1665485377969413,
"creation_time": 1672941744.904894,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599952.pt"
]
},
{
"steps": 799991,
"file_path": "results/Huggy/Huggy/Huggy-799991.onnx",
"reward": 3.742651476654955,
"creation_time": 1672941981.1732955,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799991.pt"
]
},
{
"steps": 999981,
"file_path": "results/Huggy/Huggy/Huggy-999981.onnx",
"reward": 3.7489019678189206,
"creation_time": 1672942223.2446578,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999981.pt"
]
},
{
"steps": 1199963,
"file_path": "results/Huggy/Huggy/Huggy-1199963.onnx",
"reward": 3.1343656182289124,
"creation_time": 1672942462.8336067,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199963.pt"
]
},
{
"steps": 1399987,
"file_path": "results/Huggy/Huggy/Huggy-1399987.onnx",
"reward": 3.6335905118727347,
"creation_time": 1672942698.791687,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399987.pt"
]
},
{
"steps": 1599982,
"file_path": "results/Huggy/Huggy/Huggy-1599982.onnx",
"reward": 3.7528777245817513,
"creation_time": 1672942941.6650562,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599982.pt"
]
},
{
"steps": 1799931,
"file_path": "results/Huggy/Huggy/Huggy-1799931.onnx",
"reward": 4.086755497481233,
"creation_time": 1672943184.8774571,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799931.pt"
]
},
{
"steps": 1999953,
"file_path": "results/Huggy/Huggy/Huggy-1999953.onnx",
"reward": 3.855992089819025,
"creation_time": 1672943424.4566472,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999953.pt"
]
},
{
"steps": 2000024,
"file_path": "results/Huggy/Huggy/Huggy-2000024.onnx",
"reward": 3.825834702168192,
"creation_time": 1672943424.5804224,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000024.pt"
]
}
],
"final_checkpoint": {
"steps": 2000024,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 3.825834702168192,
"creation_time": 1672943424.5804224,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000024.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "0.29.0.dev0",
"torch_version": "1.8.1+cu102"
}
}