{
"Huggy": {
"checkpoints": [
{
"steps": 199959,
"file_path": "results/Huggy2/Huggy/Huggy-199959.onnx",
"reward": 3.460497885942459,
"creation_time": 1722582771.4052145,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-199959.pt"
]
},
{
"steps": 399886,
"file_path": "results/Huggy2/Huggy/Huggy-399886.onnx",
"reward": 3.9490087316997013,
"creation_time": 1722583052.4199674,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-399886.pt"
]
},
{
"steps": 599919,
"file_path": "results/Huggy2/Huggy/Huggy-599919.onnx",
"reward": 4.2891166045747955,
"creation_time": 1722583336.979281,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-599919.pt"
]
},
{
"steps": 799972,
"file_path": "results/Huggy2/Huggy/Huggy-799972.onnx",
"reward": 3.7517176451890366,
"creation_time": 1722583615.3269203,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-799972.pt"
]
},
{
"steps": 999984,
"file_path": "results/Huggy2/Huggy/Huggy-999984.onnx",
"reward": 3.939157873612863,
"creation_time": 1722583902.2070274,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-999984.pt"
]
},
{
"steps": 1199999,
"file_path": "results/Huggy2/Huggy/Huggy-1199999.onnx",
"reward": 3.8111687107164354,
"creation_time": 1722584192.200843,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1199999.pt"
]
},
{
"steps": 1399959,
"file_path": "results/Huggy2/Huggy/Huggy-1399959.onnx",
"reward": 2.577651083469391,
"creation_time": 1722584478.2695782,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1399959.pt"
]
},
{
"steps": 1599541,
"file_path": "results/Huggy2/Huggy/Huggy-1599541.onnx",
"reward": 3.6329325033234547,
"creation_time": 1722584757.1756523,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1599541.pt"
]
},
{
"steps": 1799954,
"file_path": "results/Huggy2/Huggy/Huggy-1799954.onnx",
"reward": 3.515819975840194,
"creation_time": 1722585060.0509253,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1799954.pt"
]
},
{
"steps": 1999996,
"file_path": "results/Huggy2/Huggy/Huggy-1999996.onnx",
"reward": 3.563833956917127,
"creation_time": 1722585362.7801878,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1999996.pt"
]
},
{
"steps": 2000090,
"file_path": "results/Huggy2/Huggy/Huggy-2000090.onnx",
"reward": 3.5879589593088306,
"creation_time": 1722585363.6309805,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000090.pt"
]
}
],
"final_checkpoint": {
"steps": 2000090,
"file_path": "results/Huggy2/Huggy.onnx",
"reward": 3.5879589593088306,
"creation_time": 1722585363.6309805,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000090.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.1.0.dev0",
"torch_version": "2.3.1+cu121"
}
}