{
"Huggy": {
"checkpoints": [
{
"steps": 199884,
"file_path": "results/Huggy/Huggy/Huggy-199884.onnx",
"reward": 3.824899011188083,
"creation_time": 1675626396.876103,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199884.pt"
]
},
{
"steps": 399989,
"file_path": "results/Huggy/Huggy/Huggy-399989.onnx",
"reward": 3.6905176418168204,
"creation_time": 1675626627.9979103,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399989.pt"
]
},
{
"steps": 599956,
"file_path": "results/Huggy/Huggy/Huggy-599956.onnx",
"reward": 4.315157418665678,
"creation_time": 1675626861.6755242,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599956.pt"
]
},
{
"steps": 799888,
"file_path": "results/Huggy/Huggy/Huggy-799888.onnx",
"reward": 3.5978234811704985,
"creation_time": 1675627095.5119712,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799888.pt"
]
},
{
"steps": 999981,
"file_path": "results/Huggy/Huggy/Huggy-999981.onnx",
"reward": 3.661464886622386,
"creation_time": 1675627333.4916015,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999981.pt"
]
},
{
"steps": 1199959,
"file_path": "results/Huggy/Huggy/Huggy-1199959.onnx",
"reward": 3.194545630146475,
"creation_time": 1675627569.4188137,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199959.pt"
]
},
{
"steps": 1399940,
"file_path": "results/Huggy/Huggy/Huggy-1399940.onnx",
"reward": 3.7449554230997473,
"creation_time": 1675627802.2720087,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399940.pt"
]
},
{
"steps": 1599636,
"file_path": "results/Huggy/Huggy/Huggy-1599636.onnx",
"reward": 3.725289183250372,
"creation_time": 1675628037.7161963,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599636.pt"
]
},
{
"steps": 1799972,
"file_path": "results/Huggy/Huggy/Huggy-1799972.onnx",
"reward": 3.873872555695571,
"creation_time": 1675628275.9469879,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799972.pt"
]
},
{
"steps": 1999979,
"file_path": "results/Huggy/Huggy/Huggy-1999979.onnx",
"reward": 4.439496636390686,
"creation_time": 1675628510.0395644,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999979.pt"
]
},
{
"steps": 2000023,
"file_path": "results/Huggy/Huggy/Huggy-2000023.onnx",
"reward": 3.964333152770996,
"creation_time": 1675628510.154282,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000023.pt"
]
}
],
"final_checkpoint": {
"steps": 2000023,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 3.964333152770996,
"creation_time": 1675628510.154282,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000023.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "0.29.0.dev0",
"torch_version": "1.8.1+cu102"
}
}