ppo-Huggy/run_logs/training_status.json
{
    "Huggy": {
        "checkpoints": [
            {
                "steps": 199826,
                "file_path": "results/Huggy/Huggy/Huggy-199826.onnx",
                "reward": 3.383284472707492,
                "creation_time": 1673025004.7429967,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-199826.pt"
                ]
            },
            {
                "steps": 399772,
                "file_path": "results/Huggy/Huggy/Huggy-399772.onnx",
                "reward": 3.573491282630385,
                "creation_time": 1673025248.0547547,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-399772.pt"
                ]
            },
            {
                "steps": 599909,
                "file_path": "results/Huggy/Huggy/Huggy-599909.onnx",
                "reward": 4.184277733167012,
                "creation_time": 1673025490.909917,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-599909.pt"
                ]
            },
            {
                "steps": 799917,
                "file_path": "results/Huggy/Huggy/Huggy-799917.onnx",
                "reward": 3.4905636068936943,
                "creation_time": 1673025726.2894332,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-799917.pt"
                ]
            },
            {
                "steps": 999974,
                "file_path": "results/Huggy/Huggy/Huggy-999974.onnx",
                "reward": 3.8557770192021072,
                "creation_time": 1673025965.4142952,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-999974.pt"
                ]
            },
            {
                "steps": 1199964,
                "file_path": "results/Huggy/Huggy/Huggy-1199964.onnx",
                "reward": 3.842317469741987,
                "creation_time": 1673026206.3883877,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1199964.pt"
                ]
            },
            {
                "steps": 1399994,
                "file_path": "results/Huggy/Huggy/Huggy-1399994.onnx",
                "reward": 3.6192650097172434,
                "creation_time": 1673026444.5233476,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1399994.pt"
                ]
            },
            {
                "steps": 1599845,
                "file_path": "results/Huggy/Huggy/Huggy-1599845.onnx",
                "reward": 3.9165444778359455,
                "creation_time": 1673026686.7544885,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1599845.pt"
                ]
            },
            {
                "steps": 1799994,
                "file_path": "results/Huggy/Huggy/Huggy-1799994.onnx",
                "reward": 3.009519684733006,
                "creation_time": 1673026924.2130537,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1799994.pt"
                ]
            },
            {
                "steps": 1999951,
                "file_path": "results/Huggy/Huggy/Huggy-1999951.onnx",
                "reward": 3.4825490625178226,
                "creation_time": 1673027153.3599992,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1999951.pt"
                ]
            },
            {
                "steps": 2000052,
                "file_path": "results/Huggy/Huggy/Huggy-2000052.onnx",
                "reward": 3.486873480113777,
                "creation_time": 1673027153.4887795,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-2000052.pt"
                ]
            }
        ],
        "final_checkpoint": {
            "steps": 2000052,
            "file_path": "results/Huggy/Huggy.onnx",
            "reward": 3.486873480113777,
            "creation_time": 1673027153.4887795,
            "auxillary_file_paths": [
                "results/Huggy/Huggy/Huggy-2000052.pt"
            ]
        }
    },
    "metadata": {
        "stats_format_version": "0.3.0",
        "mlagents_version": "0.29.0.dev0",
        "torch_version": "1.8.1+cu102"
    }
}
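
The file records one entry per exported checkpoint (step count, exported `.onnx` policy, mean reward at export time, matching `.pt` training state) plus a `final_checkpoint` and a `metadata` block. As a minimal sketch of reading it, assuming the file sits at `run_logs/training_status.json` relative to the working directory (that path and the printed summary are illustrative, not part of the run), the checkpoints can be summarized with the Python standard library:

```python
import json
from pathlib import Path

# Hypothetical location; adjust for your repository layout.
status_path = Path("run_logs/training_status.json")
with status_path.open() as f:
    status = json.load(f)

# The behavior name ("Huggy") is the top-level key.
behavior = status["Huggy"]

# Each checkpoint records steps, the exported .onnx policy, the mean
# reward at export time, and the matching .pt training state under
# "auxillary_file_paths" (the spelling comes from the file itself).
for ckpt in behavior["checkpoints"]:
    print(f"step {ckpt['steps']:>8}  reward {ckpt['reward']:.3f}  {ckpt['file_path']}")

final = behavior["final_checkpoint"]
print(f"final: step {final['steps']}, reward {final['reward']:.3f}, {final['file_path']}")

# Versions that produced this run, from the metadata block.
meta = status["metadata"]
print(f"ml-agents {meta['mlagents_version']}, torch {meta['torch_version']}")
```

Note that `final_checkpoint` repeats the last checkpoint's stats but points at the consolidated `results/Huggy/Huggy.onnx` export rather than a step-numbered file.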