{
"Huggy": {
"checkpoints": [
{
"steps": 199847,
"file_path": "results/Huggy2/Huggy/Huggy-199847.onnx",
"reward": 3.584935518733242,
"creation_time": 1723499413.4120035,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-199847.pt"
]
},
{
"steps": 399856,
"file_path": "results/Huggy2/Huggy/Huggy-399856.onnx",
"reward": 4.14131266171815,
"creation_time": 1723499652.049272,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-399856.pt"
]
},
{
"steps": 599991,
"file_path": "results/Huggy2/Huggy/Huggy-599991.onnx",
"reward": 3.893105177775673,
"creation_time": 1723499894.0427096,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-599991.pt"
]
},
{
"steps": 799924,
"file_path": "results/Huggy2/Huggy/Huggy-799924.onnx",
"reward": 3.8797455807238963,
"creation_time": 1723500133.9587684,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-799924.pt"
]
},
{
"steps": 999928,
"file_path": "results/Huggy2/Huggy/Huggy-999928.onnx",
"reward": 4.046919457785046,
"creation_time": 1723500379.5579958,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-999928.pt"
]
},
{
"steps": 1199942,
"file_path": "results/Huggy2/Huggy/Huggy-1199942.onnx",
"reward": 3.9386030171490924,
"creation_time": 1723500621.951175,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1199942.pt"
]
},
{
"steps": 1399912,
"file_path": "results/Huggy2/Huggy/Huggy-1399912.onnx",
"reward": null,
"creation_time": 1723500868.2251623,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1399912.pt"
]
},
{
"steps": 1599974,
"file_path": "results/Huggy2/Huggy/Huggy-1599974.onnx",
"reward": 3.8522768739572504,
"creation_time": 1723501110.6886637,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1599974.pt"
]
},
{
"steps": 1799981,
"file_path": "results/Huggy2/Huggy/Huggy-1799981.onnx",
"reward": 3.943519052478575,
"creation_time": 1723501357.8808148,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1799981.pt"
]
},
{
"steps": 1999980,
"file_path": "results/Huggy2/Huggy/Huggy-1999980.onnx",
"reward": 4.26933851047438,
"creation_time": 1723501605.7986224,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1999980.pt"
]
},
{
"steps": 2000030,
"file_path": "results/Huggy2/Huggy/Huggy-2000030.onnx",
"reward": 4.2124736428260805,
"creation_time": 1723501605.9790041,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000030.pt"
]
}
],
"final_checkpoint": {
"steps": 2000030,
"file_path": "results/Huggy2/Huggy.onnx",
"reward": 4.2124736428260805,
"creation_time": 1723501605.9790041,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000030.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.1.0.dev0",
"torch_version": "2.3.1+cu121"
}
}