{
"Huggy": {
"checkpoints": [
{
"steps": 199935,
"file_path": "results/Huggy/Huggy/Huggy-199935.onnx",
"reward": 3.4418033688299117,
"creation_time": 1687421138.295135,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199935.pt"
]
},
{
"steps": 399981,
"file_path": "results/Huggy/Huggy/Huggy-399981.onnx",
"reward": 3.5173574214550984,
"creation_time": 1687421374.4823537,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399981.pt"
]
},
{
"steps": 599938,
"file_path": "results/Huggy/Huggy/Huggy-599938.onnx",
"reward": 3.6602597928816274,
"creation_time": 1687421615.4342978,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599938.pt"
]
},
{
"steps": 799964,
"file_path": "results/Huggy/Huggy/Huggy-799964.onnx",
"reward": 3.7916511193702096,
"creation_time": 1687421852.9472497,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799964.pt"
]
},
{
"steps": 999996,
"file_path": "results/Huggy/Huggy/Huggy-999996.onnx",
"reward": 3.4444247968269117,
"creation_time": 1687422096.3067038,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999996.pt"
]
},
{
"steps": 1199963,
"file_path": "results/Huggy/Huggy/Huggy-1199963.onnx",
"reward": 4.06526604263406,
"creation_time": 1687422339.0736043,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199963.pt"
]
},
{
"steps": 1399847,
"file_path": "results/Huggy/Huggy/Huggy-1399847.onnx",
"reward": 3.2786976800245395,
"creation_time": 1687422581.8279288,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399847.pt"
]
},
{
"steps": 1599980,
"file_path": "results/Huggy/Huggy/Huggy-1599980.onnx",
"reward": 3.872122647300843,
"creation_time": 1687422822.1179676,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599980.pt"
]
},
{
"steps": 1799996,
"file_path": "results/Huggy/Huggy/Huggy-1799996.onnx",
"reward": 4.0635395616504315,
"creation_time": 1687423066.104859,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799996.pt"
]
},
{
"steps": 1999953,
"file_path": "results/Huggy/Huggy/Huggy-1999953.onnx",
"reward": 3.778500185737127,
"creation_time": 1687423310.6390698,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999953.pt"
]
},
{
"steps": 2000055,
"file_path": "results/Huggy/Huggy/Huggy-2000055.onnx",
"reward": 3.7638035833835604,
"creation_time": 1687423310.83041,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000055.pt"
]
}
],
"final_checkpoint": {
"steps": 2000055,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 3.7638035833835604,
"creation_time": 1687423310.83041,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000055.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "0.31.0.dev0",
"torch_version": "1.11.0+cu102"
}
}
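
A minimal sketch of how this training_status.json might be inspected, assuming it is saved under run_logs/training_status.json (the path is an assumption, not part of the file itself). It only relies on the keys visible above: the "Huggy" behavior name, its "checkpoints" list, and the "final_checkpoint" entry.

import json

# Assumed location of the file shown above; adjust to your run_logs directory.
with open("run_logs/training_status.json") as f:
    status = json.load(f)

huggy = status["Huggy"]

# Print step count, mean reward, and ONNX path for each recorded checkpoint.
for ckpt in huggy["checkpoints"]:
    print(f'step {ckpt["steps"]:>8}  reward {ckpt["reward"]:.3f}  {ckpt["file_path"]}')

# The final exported policy.
final = huggy["final_checkpoint"]
print(f'final: step {final["steps"]}, reward {final["reward"]:.3f}, {final["file_path"]}')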