poca-SoccerTwos/run_logs/timers.json
{
"name": "root",
"gauges": {
"SoccerTwos.Policy.Entropy.mean": {
"value": 1.5598649978637695,
"min": 1.4148043394088745,
"max": 3.2957513332366943,
"count": 3124
},
"SoccerTwos.Policy.Entropy.sum": {
"value": 29300.50390625,
"min": 16600.056640625,
"max": 149917.59375,
"count": 3124
},
"SoccerTwos.Environment.EpisodeLength.mean": {
"value": 65.76315789473684,
"min": 40.586776859504134,
"max": 999.0,
"count": 3124
},
"SoccerTwos.Environment.EpisodeLength.sum": {
"value": 19992.0,
"min": 1200.0,
"max": 30420.0,
"count": 3124
},
"SoccerTwos.Self-play.ELO.mean": {
"value": 1744.630578303296,
"min": 1178.241648118888,
"max": 1763.201907177039,
"count": 3095
},
"SoccerTwos.Self-play.ELO.sum": {
"value": 265183.847902101,
"min": 2356.483296237776,
"max": 401242.281178166,
"count": 3095
},
"SoccerTwos.Step.mean": {
"value": 31239980.0,
"min": 9440.0,
"max": 31239980.0,
"count": 3124
},
"SoccerTwos.Step.sum": {
"value": 31239980.0,
"min": 9440.0,
"max": 31239980.0,
"count": 3124
},
"SoccerTwos.Policy.ExtrinsicBaselineEstimate.mean": {
"value": -0.004775442648679018,
"min": -0.17131519317626953,
"max": 0.22156104445457458,
"count": 3124
},
"SoccerTwos.Policy.ExtrinsicBaselineEstimate.sum": {
"value": -0.7210918068885803,
"min": -24.326757431030273,
"max": 32.56947326660156,
"count": 3124
},
"SoccerTwos.Policy.ExtrinsicValueEstimate.mean": {
"value": -0.00278552807867527,
"min": -0.16930297017097473,
"max": 0.21557892858982086,
"count": 3124
},
"SoccerTwos.Policy.ExtrinsicValueEstimate.sum": {
"value": -0.42061474919319153,
"min": -24.06451416015625,
"max": 31.690101623535156,
"count": 3124
},
"SoccerTwos.Environment.CumulativeReward.mean": {
"value": 0.0,
"min": 0.0,
"max": 0.0,
"count": 3124
},
"SoccerTwos.Environment.CumulativeReward.sum": {
"value": 0.0,
"min": 0.0,
"max": 0.0,
"count": 3124
},
"SoccerTwos.Policy.ExtrinsicReward.mean": {
"value": -0.01713377632052693,
"min": -0.6,
"max": 0.5466086924400019,
"count": 3124
},
"SoccerTwos.Policy.ExtrinsicReward.sum": {
"value": -2.5872002243995667,
"min": -64.31620037555695,
"max": 50.80800008773804,
"count": 3124
},
"SoccerTwos.Environment.GroupCumulativeReward.mean": {
"value": -0.01713377632052693,
"min": -0.6,
"max": 0.5466086924400019,
"count": 3124
},
"SoccerTwos.Environment.GroupCumulativeReward.sum": {
"value": -2.5872002243995667,
"min": -64.31620037555695,
"max": 50.80800008773804,
"count": 3124
},
"SoccerTwos.IsTraining.mean": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 3124
},
"SoccerTwos.IsTraining.sum": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 3124
},
"SoccerTwos.Losses.PolicyLoss.mean": {
"value": 0.017738105164607988,
"min": 0.010973652174773937,
"max": 0.024367449737231556,
"count": 1512
},
"SoccerTwos.Losses.PolicyLoss.sum": {
"value": 0.017738105164607988,
"min": 0.010973652174773937,
"max": 0.024367449737231556,
"count": 1512
},
"SoccerTwos.Losses.ValueLoss.mean": {
"value": 0.10335170576969782,
"min": 3.163041189206221e-06,
"max": 0.1284737418095271,
"count": 1512
},
"SoccerTwos.Losses.ValueLoss.sum": {
"value": 0.10335170576969782,
"min": 3.163041189206221e-06,
"max": 0.1284737418095271,
"count": 1512
},
"SoccerTwos.Losses.BaselineLoss.mean": {
"value": 0.10453864932060242,
"min": 3.2997339189932973e-06,
"max": 0.13083223700523378,
"count": 1512
},
"SoccerTwos.Losses.BaselineLoss.sum": {
"value": 0.10453864932060242,
"min": 3.2997339189932973e-06,
"max": 0.13083223700523378,
"count": 1512
},
"SoccerTwos.Policy.LearningRate.mean": {
"value": 0.0003,
"min": 0.0003,
"max": 0.0003,
"count": 1512
},
"SoccerTwos.Policy.LearningRate.sum": {
"value": 0.0003,
"min": 0.0003,
"max": 0.0003,
"count": 1512
},
"SoccerTwos.Policy.Epsilon.mean": {
"value": 0.20000000000000007,
"min": 0.19999999999999996,
"max": 0.20000000000000007,
"count": 1512
},
"SoccerTwos.Policy.Epsilon.sum": {
"value": 0.20000000000000007,
"min": 0.19999999999999996,
"max": 0.20000000000000007,
"count": 1512
},
"SoccerTwos.Policy.Beta.mean": {
"value": 0.005000000000000001,
"min": 0.005,
"max": 0.005000000000000001,
"count": 1512
},
"SoccerTwos.Policy.Beta.sum": {
"value": 0.005000000000000001,
"min": 0.005,
"max": 0.005000000000000001,
"count": 1512
}
},
"metadata": {
"timer_format_version": "0.1.0",
"start_time_seconds": "1740983605",
"python_version": "3.10.12 | packaged by Anaconda, Inc. | (main, Jul 5 2023, 19:01:18) [MSC v.1916 64 bit (AMD64)]",
"command_line_arguments": "\\\\?\\C:\\Users\\mechmofo\\.conda\\envs\\rl\\Scripts\\mlagents-learn ./config/poca/SoccerTwos.yaml --env=./training-envs-executables/SoccerTwos.exe --run-id=SoccerTwos --no-graphics",
"mlagents_version": "1.2.0.dev0",
"mlagents_envs_version": "1.2.0.dev0",
"communication_protocol_version": "1.5.0",
"pytorch_version": "2.6.0+cu126",
"numpy_version": "1.23.5",
"end_time_seconds": "1741029112"
},
"total": 45507.78440250001,
"count": 1,
"self": 1.0283526000130223,
"children": {
"run_training.setup": {
"total": 0.06862109999929089,
"count": 1,
"self": 0.06862109999929089
},
"TrainerController.start_learning": {
"total": 45506.687428799996,
"count": 1,
"self": 40.53003490436822,
"children": {
"TrainerController._reset_env": {
"total": 6.264219500029867,
"count": 157,
"self": 6.264219500029867
},
"TrainerController.advance": {
"total": 45459.74290179561,
"count": 2160408,
"self": 38.56875520146423,
"children": {
"env_step": {
"total": 32253.99747030285,
"count": 2160408,
"self": 18753.824297593797,
"children": {
"SubprocessEnvManager._take_step": {
"total": 13475.990711609797,
"count": 2160408,
"self": 231.60722509941115,
"children": {
"TorchPolicy.evaluate": {
"total": 13244.383486510385,
"count": 3926496,
"self": 13244.383486510385
}
}
},
"workers": {
"total": 24.182461099255306,
"count": 2160407,
"self": 0.0,
"children": {
"worker_root": {
"total": 45453.943528405434,
"count": 2160407,
"is_parallel": true,
"self": 30874.416574208873,
"children": {
"steps_from_proto": {
"total": 0.18851700005325256,
"count": 314,
"is_parallel": true,
"self": 0.037206400142167695,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.15131059991108486,
"count": 1256,
"is_parallel": true,
"self": 0.15131059991108486
}
}
},
"UnityEnvironment.step": {
"total": 14579.338437196508,
"count": 2160407,
"is_parallel": true,
"self": 705.5183977148263,
"children": {
"UnityEnvironment._generate_step_input": {
"total": 603.0071491958151,
"count": 2160407,
"is_parallel": true,
"self": 603.0071491958151
},
"communicator.exchange": {
"total": 10929.271967792018,
"count": 2160407,
"is_parallel": true,
"self": 10929.271967792018
},
"steps_from_proto": {
"total": 2341.5409224938485,
"count": 4320814,
"is_parallel": true,
"self": 477.9007207722243,
"children": {
"_process_rank_one_or_two_observation": {
"total": 1863.6402017216242,
"count": 17283256,
"is_parallel": true,
"self": 1863.6402017216242
}
}
}
}
}
}
}
}
}
}
},
"trainer_advance": {
"total": 13167.176676291296,
"count": 2160407,
"self": 261.0154291885701,
"children": {
"process_trajectory": {
"total": 6870.0783336030145,
"count": 2160407,
"self": 6860.200931202991,
"children": {
"RLTrainer._checkpoint": {
"total": 9.87740240002313,
"count": 62,
"self": 9.87740240002313
}
}
},
"_update_policy": {
"total": 6036.082913499711,
"count": 1513,
"self": 3488.6145823999395,
"children": {
"TorchPOCAOptimizer.update": {
"total": 2547.4683310997716,
"count": 45402,
"self": 2547.4683310997716
}
}
}
}
}
}
},
"trainer_threads": {
"total": 3.2999960239976645e-06,
"count": 1,
"self": 3.2999960239976645e-06
},
"TrainerController._save_models": {
"total": 0.15026929999294225,
"count": 1,
"self": 0.036373599999933504,
"children": {
"RLTrainer._checkpoint": {
"total": 0.11389569999300875,
"count": 1,
"self": 0.11389569999300875
}
}
}
}
}
}
}