{
"Huggy": {
"checkpoints": [
{
"steps": 199995,
"file_path": "results/Huggy/Huggy/Huggy-199995.onnx",
"reward": 3.4032966071559536,
"creation_time": 1692759413.0934427,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199995.pt"
]
},
{
"steps": 399847,
"file_path": "results/Huggy/Huggy/Huggy-399847.onnx",
"reward": 3.5679150244285323,
"creation_time": 1692759692.642683,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399847.pt"
]
},
{
"steps": 599950,
"file_path": "results/Huggy/Huggy/Huggy-599950.onnx",
"reward": 3.5845286215052887,
"creation_time": 1692759970.643646,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599950.pt"
]
},
{
"steps": 799982,
"file_path": "results/Huggy/Huggy/Huggy-799982.onnx",
"reward": 3.7513339421371135,
"creation_time": 1692760243.9018018,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799982.pt"
]
},
{
"steps": 999958,
"file_path": "results/Huggy/Huggy/Huggy-999958.onnx",
"reward": 3.7886916983474808,
"creation_time": 1692760518.466283,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999958.pt"
]
},
{
"steps": 1199847,
"file_path": "results/Huggy/Huggy/Huggy-1199847.onnx",
"reward": 3.5253767586768943,
"creation_time": 1692760795.6013656,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199847.pt"
]
},
{
"steps": 1399916,
"file_path": "results/Huggy/Huggy/Huggy-1399916.onnx",
"reward": 3.8639051880933306,
"creation_time": 1692761065.3518355,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399916.pt"
]
},
{
"steps": 1599986,
"file_path": "results/Huggy/Huggy/Huggy-1599986.onnx",
"reward": 4.013191781811795,
"creation_time": 1692761331.7286942,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599986.pt"
]
},
{
"steps": 1799440,
"file_path": "results/Huggy/Huggy/Huggy-1799440.onnx",
"reward": 3.823989121352925,
"creation_time": 1692761603.4275537,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799440.pt"
]
},
{
"steps": 1999888,
"file_path": "results/Huggy/Huggy/Huggy-1999888.onnx",
"reward": 3.5956089868778136,
"creation_time": 1692761867.8799903,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999888.pt"
]
},
{
"steps": 2000017,
"file_path": "results/Huggy/Huggy/Huggy-2000017.onnx",
"reward": 3.611692626499435,
"creation_time": 1692761868.085268,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000017.pt"
]
}
],
"final_checkpoint": {
"steps": 2000017,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 3.611692626499435,
"creation_time": 1692761868.085268,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000017.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "0.31.0.dev0",
"torch_version": "1.11.0+cu102"
}
}