ppo-Huggy-v2 / run_logs / training_status.json
Author: rlap28
Huggy-v2
b2e8b68 verified
{
"Huggy": {
"checkpoints": [
{
"steps": 199861,
"file_path": "results/Huggy2/Huggy/Huggy-199861.onnx",
"reward": 3.4918134053548178,
"creation_time": 1723639503.8777413,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-199861.pt"
]
},
{
"steps": 399989,
"file_path": "results/Huggy2/Huggy/Huggy-399989.onnx",
"reward": 3.7835072345203824,
"creation_time": 1723639742.0847168,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-399989.pt"
]
},
{
"steps": 599966,
"file_path": "results/Huggy2/Huggy/Huggy-599966.onnx",
"reward": 4.121100389040434,
"creation_time": 1723639983.1216578,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-599966.pt"
]
},
{
"steps": 799987,
"file_path": "results/Huggy2/Huggy/Huggy-799987.onnx",
"reward": 3.6887979081077296,
"creation_time": 1723640222.6933463,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-799987.pt"
]
},
{
"steps": 999334,
"file_path": "results/Huggy2/Huggy/Huggy-999334.onnx",
"reward": 3.3256675170527563,
"creation_time": 1723640463.8271892,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-999334.pt"
]
},
{
"steps": 1199881,
"file_path": "results/Huggy2/Huggy/Huggy-1199881.onnx",
"reward": 3.771219491958618,
"creation_time": 1723640714.2380311,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1199881.pt"
]
},
{
"steps": 1399988,
"file_path": "results/Huggy2/Huggy/Huggy-1399988.onnx",
"reward": 3.4465784375963646,
"creation_time": 1723640951.1541076,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1399988.pt"
]
},
{
"steps": 1599956,
"file_path": "results/Huggy2/Huggy/Huggy-1599956.onnx",
"reward": 3.5232029740626993,
"creation_time": 1723641190.4840922,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1599956.pt"
]
},
{
"steps": 1799404,
"file_path": "results/Huggy2/Huggy/Huggy-1799404.onnx",
"reward": 3.5362294170591566,
"creation_time": 1723641431.4962451,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1799404.pt"
]
},
{
"steps": 1999548,
"file_path": "results/Huggy2/Huggy/Huggy-1999548.onnx",
"reward": 3.657469600100409,
"creation_time": 1723641669.6395605,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1999548.pt"
]
},
{
"steps": 2000298,
"file_path": "results/Huggy2/Huggy/Huggy-2000298.onnx",
"reward": 3.616120639830659,
"creation_time": 1723641669.8844068,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000298.pt"
]
}
],
"final_checkpoint": {
"steps": 2000298,
"file_path": "results/Huggy2/Huggy.onnx",
"reward": 3.616120639830659,
"creation_time": 1723641669.8844068,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000298.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.1.0.dev0",
"torch_version": "2.3.1+cu121"
}
}