{
    "config": {
        "name": "pytorch_generate",
        "backend": {
            "name": "pytorch",
            "version": "2.3.0+cu121",
            "_target_": "optimum_benchmark.backends.pytorch.backend.PyTorchBackend",
            "task": "text-generation",
            "library": "transformers",
            "model": "google/gemma-2b",
            "processor": "google/gemma-2b",
            "device": "cuda",
            "device_ids": "0",
            "seed": 42,
            "inter_op_num_threads": null,
            "intra_op_num_threads": null,
            "model_kwargs": {},
            "processor_kwargs": {},
            "hub_kwargs": {},
            "no_weights": true,
            "device_map": null,
            "torch_dtype": "float16",
            "eval_mode": true,
            "to_bettertransformer": false,
            "low_cpu_mem_usage": null,
            "attn_implementation": null,
            "cache_implementation": "static",
            "autocast_enabled": false,
            "autocast_dtype": null,
            "torch_compile": true,
            "torch_compile_target": "forward",
            "torch_compile_config": {
                "backend": "inductor",
                "mode": "reduce-overhead",
                "fullgraph": true
            },
            "quantization_scheme": null,
            "quantization_config": {},
            "deepspeed_inference": false,
            "deepspeed_inference_config": {},
            "peft_type": null,
            "peft_config": {}
        },
        "scenario": {
            "name": "inference",
            "_target_": "optimum_benchmark.scenarios.inference.scenario.InferenceScenario",
            "iterations": 2,
            "duration": 0,
            "warmup_runs": 10,
            "input_shapes": {
                "batch_size": 1,
                "num_choices": 2,
                "sequence_length": 7
            },
            "new_tokens": null,
            "latency": true,
            "memory": true,
            "energy": false,
            "forward_kwargs": {},
            "generate_kwargs": {
                "max_new_tokens": 128,
                "min_new_tokens": 128,
                "do_sample": false
            },
            "call_kwargs": {}
        },
        "launcher": {
            "name": "process",
            "_target_": "optimum_benchmark.launchers.process.launcher.ProcessLauncher",
            "device_isolation": true,
            "device_isolation_action": "warn",
            "start_method": "spawn"
        },
        "environment": {
            "cpu": " AMD EPYC 7R32",
            "cpu_count": 16,
            "cpu_ram_mb": 66697.29792,
            "system": "Linux",
            "machine": "x86_64",
            "platform": "Linux-5.10.217-205.860.amzn2.x86_64-x86_64-with-glibc2.29",
            "processor": "x86_64",
            "python_version": "3.8.10",
            "gpu": [
                "NVIDIA A10G"
            ],
            "gpu_count": 1,
            "gpu_vram_mb": 24146608128,
            "optimum_benchmark_version": "0.2.1",
            "optimum_benchmark_commit": null,
            "transformers_version": "4.42.0.dev0",
            "transformers_commit": "22b41b3f8a5cdb37e686d18d8d9a24eb98a331ec",
            "accelerate_version": "0.32.0.dev0",
            "accelerate_commit": null,
            "diffusers_version": null,
            "diffusers_commit": null,
            "optimum_version": "1.21.0.dev0",
            "optimum_commit": null,
            "timm_version": "0.9.16",
            "timm_commit": null,
            "peft_version": "0.11.2.dev0",
            "peft_commit": null
        }
    },
    "report": {
        "prefill": {
            "memory": {
                "unit": "MB",
                "max_ram": 1918.640128,
                "max_global_vram": 6094.848,
                "max_process_vram": 0.0,
                "max_reserved": 5379.19488,
                "max_allocated": 5021.251584
            },
            "latency": {
                "unit": "s",
                "count": 2,
                "total": 0.028458623886108397,
                "mean": 0.014229311943054199,
                "stdev": 0.0002845439910888672,
                "p50": 0.014229311943054199,
                "p90": 0.014456947135925293,
                "p95": 0.014485401535034179,
                "p99": 0.014508165054321289,
                "values": [
                    0.014513855934143066,
                    0.013944767951965331
                ]
            },
            "throughput": {
                "unit": "tokens/s",
                "value": 491.9422687487664
            },
            "energy": null,
            "efficiency": null
        },
        "decode": {
            "memory": {
                "unit": "MB",
                "max_ram": 2262.540288,
                "max_global_vram": 8907.128832,
                "max_process_vram": 0.0,
                "max_reserved": 5379.19488,
                "max_allocated": 5027.089408
            },
            "latency": {
                "unit": "s",
                "count": 2,
                "total": 3.1481312255859377,
                "mean": 1.5740656127929689,
                "stdev": 0.01655999755859383,
                "p50": 1.5740656127929689,
                "p90": 1.5873136108398438,
                "p95": 1.5889696105957032,
                "p99": 1.5902944104003907,
                "values": [
                    1.5906256103515626,
                    1.557505615234375
                ]
            },
            "throughput": {
                "unit": "tokens/s",
                "value": 80.68278664359835
            },
            "energy": null,
            "efficiency": null
        },
        "per_token": {
            "memory": null,
            "latency": {
                "unit": "s",
                "count": 253,
                "total": 3.1491850204467755,
                "mean": 0.01244737162231928,
                "stdev": 0.0009020253063466917,
                "p50": 0.01227673625946045,
                "p90": 0.012713574028015137,
                "p95": 0.01273016300201416,
                "p99": 0.012817694702148438,
                "values": [
                    0.012660736083984376,
                    0.012681216239929198,
                    0.01278771209716797,
                    0.012776448249816894,
                    0.012670975685119629,
                    0.012741632461547851,
                    0.012703743934631348,
                    0.012752896308898925,
                    0.01268838405609131,
                    0.012711935997009278,
                    0.012669952392578124,
                    0.012719103813171387,
                    0.012658687591552734,
                    0.012703743934631348,
                    0.012679167747497559,
                    0.012724224090576173,
                    0.012707839965820313,
                    0.012722175598144531,
                    0.012702719688415527,
                    0.012727295875549317,
                    0.01265664005279541,
                    0.012709888458251953,
                    0.01268019199371338,
                    0.012725248336791992,
                    0.012722175598144531,
                    0.012708864212036132,
                    0.01266380786895752,
                    0.012724224090576173,
                    0.012669952392578124,
                    0.01274060821533203,
                    0.012692480087280274,
                    0.012713983535766601,
                    0.012667903900146485,
                    0.012689408302307128,
                    0.012637184143066407,
                    0.0126310396194458,
                    0.012660736083984376,
                    0.012701696395874023,
                    0.012645376205444337,
                    0.012701696395874023,
                    0.01262284755706787,
                    0.012734463691711426,
                    0.012722175598144531,
                    0.012725248336791992,
                    0.012696576118469239,
                    0.012724224090576173,
                    0.012646400451660156,
                    0.0127457275390625,
                    0.012710911750793457,
                    0.012735487937927246,
                    0.01265664005279541,
                    0.012710911750793457,
                    0.012675071716308594,
                    0.012735487937927246,
                    0.012658687591552734,
                    0.012723199844360352,
                    0.012678144454956054,
                    0.012751872062683106,
                    0.012682239532470703,
                    0.012726271629333496,
                    0.01267199993133545,
                    0.012700672149658204,
                    0.012660736083984376,
                    0.012699647903442383,
                    0.012592127799987793,
                    0.012612607955932617,
                    0.012456959724426269,
                    0.012705792427062988,
                    0.012298239707946777,
                    0.012421119689941406,
                    0.012484607696533203,
                    0.012527615547180175,
                    0.012682239532470703,
                    0.01285632038116455,
                    0.012585984230041505,
                    0.012495871543884277,
                    0.01264742374420166,
                    0.012443648338317872,
                    0.012412927627563476,
                    0.012309503555297852,
                    0.012279808044433594,
                    0.012313599586486817,
                    0.01225830364227295,
                    0.012279808044433594,
                    0.01225216007232666,
                    0.012262399673461915,
                    0.012231679916381836,
                    0.012278783798217773,
                    0.01222758388519287,
                    0.012285951614379884,
                    0.012226559638977052,
                    0.012270591735839843,
                    0.012224512100219726,
                    0.012285951614379884,
                    0.01222758388519287,
                    0.012280832290649414,
                    0.012259327888488769,
                    0.012301312446594239,
                    0.012223551750183106,
                    0.012269503593444824,
                    0.012232704162597656,
                    0.012267519950866699,
                    0.012232704162597656,
                    0.012272640228271485,
                    0.012226559638977052,
                    0.012263423919677734,
                    0.012232704162597656,
                    0.012285951614379884,
                    0.0122357759475708,
                    0.012267583847045899,
                    0.012242879867553711,
                    0.012336128234863282,
                    0.012477439880371094,
                    0.012446720123291016,
                    0.01226854419708252,
                    0.012278783798217773,
                    0.01225011157989502,
                    0.012273664474487305,
                    0.012244992256164551,
                    0.012278783798217773,
                    0.012239871978759765,
                    0.01226854419708252,
                    0.012257280349731446,
                    0.012297216415405274,
                    0.012230655670166016,
                    0.012292096138000488,
                    0.026411008834838868,
                    0.01224396800994873,
                    0.012285951614379884,
                    0.01223475170135498,
                    0.012279808044433594,
                    0.012228608131408691,
                    0.012303359985351562,
                    0.012239871978759765,
                    0.012270591735839843,
                    0.012241920471191407,
                    0.012275712013244629,
                    0.012228608131408691,
                    0.01227673625946045,
                    0.01223475170135498,
                    0.012271615982055664,
                    0.012225536346435547,
                    0.012271615982055664,
                    0.012247103691101074,
                    0.012271552085876465,
                    0.012236800193786621,
                    0.012274687767028808,
                    0.012244992256164551,
                    0.012309503555297852,
                    0.012229632377624512,
                    0.012270591735839843,
                    0.012246015548706055,
                    0.012275712013244629,
                    0.012238847732543945,
                    0.012270591735839843,
                    0.012225536346435547,
                    0.012272640228271485,
                    0.012264448165893555,
                    0.012292096138000488,
                    0.012230655670166016,
                    0.012274687767028808,
                    0.012224543571472167,
                    0.012283871650695801,
                    0.01224294376373291,
                    0.01226956844329834,
                    0.012220416069030762,
                    0.012267519950866699,
                    0.01224396800994873,
                    0.012281855583190919,
                    0.012231679916381836,
                    0.012263423919677734,
                    0.012226559638977052,
                    0.012277759552001954,
                    0.012222463607788087,
                    0.01227673625946045,
                    0.012217344284057617,
                    0.012271615982055664,
                    0.012221440315246582,
                    0.012291071891784668,
                    0.012241920471191407,
                    0.01226956844329834,
                    0.012229632377624512,
                    0.012272640228271485,
                    0.012233728408813477,
                    0.012278783798217773,
                    0.012216320037841797,
                    0.012265472412109376,
                    0.012223487854003906,
                    0.012275712013244629,
                    0.012232704162597656,
                    0.012279808044433594,
                    0.01223475170135498,
                    0.01226956844329834,
                    0.012220416069030762,
                    0.012291071891784668,
                    0.012237824440002442,
                    0.012280832290649414,
                    0.012231679916381836,
                    0.012271615982055664,
                    0.012236800193786621,
                    0.012266495704650878,
                    0.012214271545410157,
                    0.012272640228271485,
                    0.01221939182281494,
                    0.012288000106811523,
                    0.012237824440002442,
                    0.012347392082214356,
                    0.01233510398864746,
                    0.012850175857543946,
                    0.012255231857299804,
                    0.012300288200378418,
                    0.012240896224975586,
                    0.01228492832183838,
                    0.012236800193786621,
                    0.012275712013244629,
                    0.012231679916381836,
                    0.012289024353027344,
                    0.012221440315246582,
                    0.012283904075622559,
                    0.012230655670166016,
                    0.012281855583190919,
                    0.012236800193786621,
                    0.012283904075622559,
                    0.012257280349731446,
                    0.012272640228271485,
                    0.012225536346435547,
                    0.012286975860595703,
                    0.0122357759475708,
                    0.012266495704650878,
                    0.012216320037841797,
                    0.012274687767028808,
                    0.01225011157989502,
                    0.012266495704650878,
                    0.012232704162597656,
                    0.012271615982055664,
                    0.012228608131408691,
                    0.012271615982055664,
                    0.012248064041137695,
                    0.012321791648864745,
                    0.012278783798217773,
                    0.01227673625946045,
                    0.012236800193786621,
                    0.012290047645568849,
                    0.012233728408813477,
                    0.012274687767028808,
                    0.012231679916381836,
                    0.012273664474487305,
                    0.012232704162597656,
                    0.012298239707946777,
                    0.01222867202758789,
                    0.012271552085876465,
                    0.01223475170135498,
                    0.012278783798217773
                ]
            },
            "throughput": {
                "unit": "tokens/s",
                "value": 80.33824572304952
            },
            "energy": null,
            "efficiency": null
        }
    }
}