{
    "config": {
        "name": "pytorch-llama",
        "backend": {
            "name": "pytorch",
            "version": "2.5.1",
            "_target_": "optimum_benchmark.backends.pytorch.backend.PyTorchBackend",
            "task": "text-generation",
            "library": "transformers",
            "model_type": "llama",
            "model": "meta-llama/Llama-2-7b-chat-hf",
            "processor": "meta-llama/Llama-2-7b-chat-hf",
            "device": "cuda",
            "device_ids": "0",
            "seed": 42,
            "inter_op_num_threads": null,
            "intra_op_num_threads": null,
            "model_kwargs": {},
            "processor_kwargs": {},
            "no_weights": true,
            "device_map": null,
            "torch_dtype": "float16",
            "eval_mode": true,
            "to_bettertransformer": false,
            "low_cpu_mem_usage": null,
            "attn_implementation": null,
            "cache_implementation": null,
            "autocast_enabled": false,
            "autocast_dtype": null,
            "torch_compile": false,
            "torch_compile_target": "forward",
            "torch_compile_config": {},
            "quantization_scheme": null,
            "quantization_config": {},
            "deepspeed_inference": false,
            "deepspeed_inference_config": {},
            "peft_type": null,
            "peft_config": {}
        },
        "scenario": {
            "name": "inference",
            "_target_": "optimum_benchmark.scenarios.inference.scenario.InferenceScenario",
            "iterations": 10,
            "duration": 10,
            "warmup_runs": 10,
            "input_shapes": {
                "batch_size": 1,
                "num_choices": 2,
                "sequence_length": 128
            },
            "new_tokens": null,
            "memory": true,
            "latency": true,
            "energy": false,
            "forward_kwargs": {},
            "generate_kwargs": {
                "max_new_tokens": 32,
                "min_new_tokens": 32
            },
            "call_kwargs": {}
        },
        "launcher": {
            "name": "process",
            "_target_": "optimum_benchmark.launchers.process.launcher.ProcessLauncher",
            "device_isolation": true,
            "device_isolation_action": "warn",
            "numactl": false,
            "numactl_kwargs": {},
            "start_method": "spawn"
        },
        "environment": {
            "cpu": " Intel(R) Xeon(R) Platinum 8339HC CPU @ 1.80GHz",
            "cpu_count": 22,
            "cpu_ram_mb": 189584.162816,
            "system": "Linux",
            "machine": "x86_64",
            "platform": "Linux-5.19.0-0_fbk12_hardened_11583_g0bef9520ca2b-x86_64-with-glibc2.34",
            "processor": "x86_64",
            "python_version": "3.10.15",
            "gpu": [
                "NVIDIA PG509-210"
            ],
            "gpu_count": 1,
            "gpu_vram_mb": 85899345920,
            "optimum_benchmark_version": "0.5.0.dev0",
            "optimum_benchmark_commit": "78d7da546ad060a586060d0d3680bec33f32a45b",
            "transformers_version": "4.47.0.dev0",
            "transformers_commit": null,
            "accelerate_version": "1.1.0",
            "accelerate_commit": null,
            "diffusers_version": "0.31.0",
            "diffusers_commit": null,
            "optimum_version": "1.24.0.dev0",
            "optimum_commit": null,
            "timm_version": "1.0.11",
            "timm_commit": null,
            "peft_version": "0.13.2",
            "peft_commit": null
        },
        "print_report": true,
        "log_report": true
    },
"report": { |
|
"load": { |
|
"memory": { |
|
"unit": "MB", |
|
"max_ram": 3090.829312, |
|
"max_global_vram": 15467.413504, |
|
"max_process_vram": 14554.23488, |
|
"max_reserved": 13478.395904, |
|
"max_allocated": 13476.849152 |
|
}, |
|
"latency": { |
|
"unit": "s", |
|
"values": [ |
|
0.27308145141601564 |
|
], |
|
"count": 1, |
|
"total": 0.27308145141601564, |
|
"mean": 0.27308145141601564, |
|
"p50": 0.27308145141601564, |
|
"p90": 0.27308145141601564, |
|
"p95": 0.27308145141601564, |
|
"p99": 0.27308145141601564, |
|
"stdev": 0, |
|
"stdev_": 0 |
|
}, |
|
"throughput": null, |
|
"energy": null, |
|
"efficiency": null |
|
}, |
|
"prefill": { |
|
"memory": { |
|
"unit": "MB", |
|
"max_ram": 4372.688896, |
|
"max_global_vram": 15930.884096, |
|
"max_process_vram": 15017.705472, |
|
"max_reserved": 13679.722496, |
|
"max_allocated": 13565.197824 |
|
}, |
|
"latency": { |
|
"unit": "s", |
|
"values": [ |
|
0.042849918365478516, |
|
0.042336288452148436, |
|
0.04412355041503906, |
|
0.1032154541015625, |
|
0.11535842895507813, |
|
0.04317612838745117, |
|
0.04179769515991211, |
|
0.04377532958984375, |
|
0.04315631866455078, |
|
0.0427691535949707 |
|
], |
|
"count": 10, |
|
"total": 0.5625582656860352, |
|
"mean": 0.05625582656860352, |
|
"p50": 0.043166223526000976, |
|
"p90": 0.10442975158691406, |
|
"p95": 0.10989409027099609, |
|
"p99": 0.11426556121826172, |
|
"stdev": 0.026661497700024808, |
|
"stdev_": 47.393308971313985 |
|
}, |
|
"throughput": { |
|
"unit": "tokens/s", |
|
"value": 2275.3198700921234 |
|
}, |
|
"energy": null, |
|
"efficiency": null |
|
}, |
|
"decode": { |
|
"memory": { |
|
"unit": "MB", |
|
"max_ram": 4372.688896, |
|
"max_global_vram": 15930.884096, |
|
"max_process_vram": 15017.705472, |
|
"max_reserved": 13679.722496, |
|
"max_allocated": 13579.885056 |
|
}, |
|
"latency": { |
|
"unit": "s", |
|
"values": [ |
|
1.3066851806640625, |
|
1.3094490966796875, |
|
2.044265380859375, |
|
3.5165107421875, |
|
1.70470849609375, |
|
1.2808111572265626, |
|
1.28964453125, |
|
1.3141204833984375, |
|
1.31193017578125, |
|
1.3033433837890624 |
|
], |
|
"count": 10, |
|
"total": 16.381468627929685, |
|
"mean": 1.6381468627929685, |
|
"p50": 1.3106896362304687, |
|
"p90": 2.191489916992187, |
|
"p95": 2.854000329589842, |
|
"p99": 3.3840086596679693, |
|
"stdev": 0.6699379901058647, |
|
"stdev_": 40.896088459593294 |
|
}, |
|
"throughput": { |
|
"unit": "tokens/s", |
|
"value": 18.923822218934852 |
|
}, |
|
"energy": null, |
|
"efficiency": null |
|
}, |
|
"per_token": { |
|
"memory": null, |
|
"latency": { |
|
"unit": "s", |
|
"values": [ |
|
0.04191542434692383, |
|
0.042579071044921875, |
|
0.042423648834228514, |
|
0.042667072296142576, |
|
0.04215135955810547, |
|
0.042577407836914063, |
|
0.042367488861083984, |
|
0.042291553497314456, |
|
0.04188163375854492, |
|
0.042248767852783205, |
|
0.04241222381591797, |
|
0.04260515213012695, |
|
0.04217718505859375, |
|
0.04143420791625976, |
|
0.041810047149658205, |
|
0.042092544555664066, |
|
0.04116432189941406, |
|
0.04142409515380859, |
|
0.041408222198486326, |
|
0.04132032012939453, |
|
0.04229532623291016, |
|
0.04164476776123047, |
|
0.041377185821533206, |
|
0.04176531219482422, |
|
0.042105438232421875, |
|
0.042758590698242185, |
|
0.04246393585205078, |
|
0.04236259078979492, |
|
0.042546688079833986, |
|
0.04253504180908203, |
|
0.042829856872558594, |
|
0.042415454864501954, |
|
0.04203772735595703, |
|
0.042286048889160155, |
|
0.04240460968017578, |
|
0.04173625564575195, |
|
0.04174921417236328, |
|
0.04293366241455078, |
|
0.042129150390625, |
|
0.04229241561889648, |
|
0.041737438201904294, |
|
0.041796993255615235, |
|
0.04160700988769531, |
|
0.04175289535522461, |
|
0.04123561477661133, |
|
0.04185567855834961, |
|
0.04204278564453125, |
|
0.04217523193359375, |
|
0.0423392333984375, |
|
0.04258483123779297, |
|
0.04281520080566406, |
|
0.042122207641601565, |
|
0.04272540664672852, |
|
0.041860958099365235, |
|
0.041877567291259764, |
|
0.042411903381347656, |
|
0.04223100662231445, |
|
0.04210918426513672, |
|
0.04216057586669922, |
|
0.04233536148071289, |
|
0.04333359909057617, |
|
0.04332051086425781, |
|
0.042850177764892576, |
|
0.042695457458496094, |
|
0.042466495513916014, |
|
0.04299929428100586, |
|
0.0425247688293457, |
|
0.04241743850708008, |
|
0.042557952880859375, |
|
0.0421776008605957, |
|
0.04174169540405273, |
|
0.04224505615234375, |
|
0.04266511917114258, |
|
0.04202025604248047, |
|
0.04272832107543945, |
|
0.04281760025024414, |
|
0.04416921615600586, |
|
0.06887305450439453, |
|
0.05605843353271484, |
|
0.05668460845947266, |
|
0.05483055877685547, |
|
0.08897789001464844, |
|
0.09750096130371094, |
|
0.09425520324707032, |
|
0.0924443817138672, |
|
0.09535769653320313, |
|
0.09481827545166016, |
|
0.09346739196777344, |
|
0.09819728088378907, |
|
0.10047555541992187, |
|
0.10317075347900391, |
|
0.10775225830078125, |
|
0.100318115234375, |
|
0.10855017852783203, |
|
0.1130796127319336, |
|
0.11600943756103516, |
|
0.11347756958007812, |
|
0.09833424377441406, |
|
0.11609926605224609, |
|
0.11958451080322266, |
|
0.11272882843017579, |
|
0.11193257904052735, |
|
0.11175071716308593, |
|
0.10165532684326171, |
|
0.10539485168457031, |
|
0.1210396499633789, |
|
0.10085305786132813, |
|
0.10247881317138671, |
|
0.11099820709228515, |
|
0.11294588470458984, |
|
0.1135646743774414, |
|
0.11586153411865234, |
|
0.11811955261230468, |
|
0.11158716583251953, |
|
0.11592499542236329, |
|
0.11071324920654296, |
|
0.0948584976196289, |
|
0.11060707092285156, |
|
0.11323760223388672, |
|
0.11380912017822266, |
|
0.12807974243164064, |
|
0.13467039489746094, |
|
0.126724609375, |
|
0.1291678466796875, |
|
0.10089337921142578, |
|
0.11095187377929687, |
|
0.12142182159423828, |
|
0.12283734130859375, |
|
0.11575263977050781, |
|
0.0758150405883789, |
|
0.04207126235961914, |
|
0.04189494323730469, |
|
0.041712799072265626, |
|
0.04230758285522461, |
|
0.04281987380981445, |
|
0.04235769653320313, |
|
0.042498977661132815, |
|
0.041836769104003906, |
|
0.04191897583007813, |
|
0.04201078414916992, |
|
0.04220412826538086, |
|
0.04233091354370117, |
|
0.0425052490234375, |
|
0.04208924865722656, |
|
0.04268105697631836, |
|
0.042538017272949216, |
|
0.04214044952392578, |
|
0.04245888137817383, |
|
0.041575809478759766, |
|
0.04219535827636719, |
|
0.04172409439086914, |
|
0.042412479400634764, |
|
0.04233359909057617, |
|
0.04264799880981445, |
|
0.042741374969482424, |
|
0.042078495025634766, |
|
0.0416409912109375, |
|
0.041192161560058595, |
|
0.04195363235473633, |
|
0.041746974945068356, |
|
0.04148643112182617, |
|
0.04147795104980469, |
|
0.04144079971313477, |
|
0.0417437744140625, |
|
0.04227993774414063, |
|
0.04174451065063477, |
|
0.04158595275878906, |
|
0.04163116836547852, |
|
0.04165110397338867, |
|
0.04138857650756836, |
|
0.041430526733398435, |
|
0.04143731307983398, |
|
0.04160422515869141, |
|
0.04230012893676758, |
|
0.04157596969604492, |
|
0.040909534454345704, |
|
0.039825374603271485, |
|
0.040901119232177735, |
|
0.041000991821289065, |
|
0.040124702453613284, |
|
0.04002646255493164, |
|
0.04019046401977539, |
|
0.04064220809936524, |
|
0.040949886322021484, |
|
0.04107136154174805, |
|
0.04081779098510742, |
|
0.04128435134887695, |
|
0.04092412948608398, |
|
0.041256832122802733, |
|
0.041461185455322264, |
|
0.041255870819091794, |
|
0.04165849685668945, |
|
0.04133014297485352, |
|
0.04230928039550781, |
|
0.04048223876953125, |
|
0.041119361877441404, |
|
0.0415755844116211, |
|
0.041320480346679685, |
|
0.041856609344482425, |
|
0.040470943450927735, |
|
0.04156825637817383, |
|
0.04154057693481445, |
|
0.04136751937866211, |
|
0.042007358551025394, |
|
0.041194496154785154, |
|
0.0407567024230957, |
|
0.04147974395751953, |
|
0.041043521881103516, |
|
0.04155209732055664, |
|
0.041494560241699216, |
|
0.041851360321044924, |
|
0.04305696105957031, |
|
0.04270671844482422, |
|
0.04219990539550781, |
|
0.041774879455566405, |
|
0.042256160736083986, |
|
0.04242182540893555, |
|
0.042608448028564457, |
|
0.04171971130371094, |
|
0.041851264953613285, |
|
0.042472095489501954, |
|
0.042538814544677735, |
|
0.04312438583374024, |
|
0.04250979232788086, |
|
0.04258022308349609, |
|
0.042651966094970704, |
|
0.04271868896484375, |
|
0.04214729690551758, |
|
0.04235161590576172, |
|
0.04262947082519531, |
|
0.042045951843261715, |
|
0.04250300979614258, |
|
0.04245475387573242, |
|
0.04317555236816406, |
|
0.043292800903320314, |
|
0.04280947113037109, |
|
0.042802783966064455, |
|
0.04177932739257813, |
|
0.04160086441040039, |
|
0.04290496063232422, |
|
0.042257312774658204, |
|
0.04274870300292969, |
|
0.0417022705078125, |
|
0.041853984832763674, |
|
0.041931167602539066, |
|
0.041278816223144534, |
|
0.0417850227355957, |
|
0.04220809555053711, |
|
0.042773441314697264, |
|
0.042192256927490235, |
|
0.04251903915405274, |
|
0.04218326568603516, |
|
0.04159587097167969, |
|
0.042288639068603515, |
|
0.04183583831787109, |
|
0.04216559982299805, |
|
0.04204422378540039, |
|
0.04211254501342773, |
|
0.04194307327270508, |
|
0.04218828964233398, |
|
0.041896446228027344, |
|
0.04142230224609375, |
|
0.04290748977661133, |
|
0.04234604644775391, |
|
0.04259664154052734, |
|
0.04191836929321289, |
|
0.04131011199951172, |
|
0.041224449157714844, |
|
0.04189164733886719, |
|
0.04182463836669922, |
|
0.04202316665649414, |
|
0.041646400451660154, |
|
0.041784992218017576, |
|
0.043639873504638674, |
|
0.04309779357910156, |
|
0.04308710479736328, |
|
0.04272867202758789, |
|
0.04309328079223633, |
|
0.04458243179321289, |
|
0.04276784133911133, |
|
0.042258144378662106, |
|
0.04150400161743164, |
|
0.041438846588134765, |
|
0.0415425910949707, |
|
0.04258816146850586, |
|
0.04307843017578125, |
|
0.04271952056884765, |
|
0.04265644836425781, |
|
0.0416759033203125, |
|
0.04200495910644531, |
|
0.042205631256103514, |
|
0.04226015853881836, |
|
0.04359702301025391, |
|
0.04363199996948242, |
|
0.04134511947631836, |
|
0.04166060638427734, |
|
0.042258975982666015, |
|
0.04277664184570312, |
|
0.041848033905029294, |
|
0.041294849395751954, |
|
0.041416671752929686, |
|
0.04120956802368164, |
|
0.040435550689697265, |
|
0.04215622329711914, |
|
0.041587776184082034, |
|
0.04140332794189453, |
|
0.04134793472290039, |
|
0.04149744033813477, |
|
0.04164566421508789, |
|
0.04253910446166992 |
|
], |
|
"count": 310, |
|
"total": 16.368514682769767, |
|
"mean": 0.052801660266999284, |
|
"p50": 0.04225246429443359, |
|
"p90": 0.1033931632995606, |
|
"p95": 0.11352547721862793, |
|
"p99": 0.12637475524902353, |
|
"stdev": 0.02475334956189447, |
|
"stdev_": 46.87986975546896 |
|
}, |
|
"throughput": { |
|
"unit": "tokens/s", |
|
"value": 18.938798419279895 |
|
}, |
|
"energy": null, |
|
"efficiency": null |
|
} |
|
} |
|
} |