PPO-Huggy/run_logs/training_status.json
{
    "Huggy": {
        "checkpoints": [
            {
                "steps": 199866,
                "file_path": "results/Huggy/Huggy/Huggy-199866.onnx",
                "reward": 3.2909026255070324,
                "creation_time": 1671804232.5320284,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-199866.pt"
                ]
            },
            {
                "steps": 399988,
                "file_path": "results/Huggy/Huggy/Huggy-399988.onnx",
                "reward": 3.9790257005130543,
                "creation_time": 1671804439.0841486,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-399988.pt"
                ]
            },
            {
                "steps": 599996,
                "file_path": "results/Huggy/Huggy/Huggy-599996.onnx",
                "reward": 4.126525084177653,
                "creation_time": 1671804651.7905273,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-599996.pt"
                ]
            },
            {
                "steps": 799990,
                "file_path": "results/Huggy/Huggy/Huggy-799990.onnx",
                "reward": 3.9861383499151253,
                "creation_time": 1671804861.5179436,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-799990.pt"
                ]
            },
            {
                "steps": 999995,
                "file_path": "results/Huggy/Huggy/Huggy-999995.onnx",
                "reward": 3.9408893950847017,
                "creation_time": 1671805074.4644222,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-999995.pt"
                ]
            },
            {
                "steps": 1199350,
                "file_path": "results/Huggy/Huggy/Huggy-1199350.onnx",
                "reward": 3.4394188821315765,
                "creation_time": 1671805289.6052132,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1199350.pt"
                ]
            },
            {
                "steps": 1399975,
                "file_path": "results/Huggy/Huggy/Huggy-1399975.onnx",
                "reward": 3.676513421584956,
                "creation_time": 1671805500.0271566,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1399975.pt"
                ]
            },
            {
                "steps": 1599985,
                "file_path": "results/Huggy/Huggy/Huggy-1599985.onnx",
                "reward": 3.7555174147341885,
                "creation_time": 1671805715.4655178,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1599985.pt"
                ]
            },
            {
                "steps": 1799976,
                "file_path": "results/Huggy/Huggy/Huggy-1799976.onnx",
                "reward": 3.9528173221631,
                "creation_time": 1671805937.3455431,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1799976.pt"
                ]
            },
            {
                "steps": 1999967,
                "file_path": "results/Huggy/Huggy/Huggy-1999967.onnx",
                "reward": 3.880592845104359,
                "creation_time": 1671806157.217835,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1999967.pt"
                ]
            },
            {
                "steps": 2000033,
                "file_path": "results/Huggy/Huggy/Huggy-2000033.onnx",
                "reward": 3.7931458055973053,
                "creation_time": 1671806157.3353515,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-2000033.pt"
                ]
            }
        ],
        "final_checkpoint": {
            "steps": 2000033,
            "file_path": "results/Huggy/Huggy.onnx",
            "reward": 3.7931458055973053,
            "creation_time": 1671806157.3353515,
            "auxillary_file_paths": [
                "results/Huggy/Huggy/Huggy-2000033.pt"
            ]
        }
    },
    "metadata": {
        "stats_format_version": "0.3.0",
        "mlagents_version": "0.29.0.dev0",
        "torch_version": "1.8.1+cu102"
    }
}
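
The log above records one entry per ML-Agents checkpoint, each with a step count, exported `.onnx` path, mean `reward`, Unix `creation_time`, and the companion `.pt` file under `auxillary_file_paths` (the spelling is the key actually written by ML-Agents). As a minimal sketch, assuming the file sits at `run_logs/training_status.json` relative to the working directory and that the behavior name matches the top-level `"Huggy"` key, the snippet below loads the log and lists each checkpoint, then picks the one with the highest mean reward:

```python
import json

# Assumptions: this relative path and behavior name match the repo layout above.
STATUS_PATH = "run_logs/training_status.json"
BEHAVIOR = "Huggy"

with open(STATUS_PATH) as f:
    status = json.load(f)

checkpoints = status[BEHAVIOR]["checkpoints"]

# Print each checkpoint's step count, mean reward, and exported model path.
for ckpt in checkpoints:
    print(f"{ckpt['steps']:>9} steps  reward={ckpt['reward']:.4f}  {ckpt['file_path']}")

# Highest-reward checkpoint; for this run it is Huggy-599996.onnx at ~4.13,
# not the final_checkpoint, which simply reflects the last training step.
best = max(checkpoints, key=lambda c: c["reward"])
print("best:", best["file_path"], f"(reward={best['reward']:.4f})")
```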