{
    "config": {
        "name": "pytorch_generate",
        "backend": {
            "name": "pytorch",
            "version": "2.4.0+cu121",
            "_target_": "optimum_benchmark.backends.pytorch.backend.PyTorchBackend",
            "task": "text-generation",
            "library": "transformers",
            "model_type": "gemma",
            "model": "google/gemma-2b",
            "processor": "google/gemma-2b",
            "device": "cuda",
            "device_ids": "0",
            "seed": 42,
            "inter_op_num_threads": null,
            "intra_op_num_threads": null,
            "model_kwargs": {},
            "processor_kwargs": {},
            "no_weights": true,
            "device_map": null,
            "torch_dtype": "float16",
            "eval_mode": true,
            "to_bettertransformer": false,
            "low_cpu_mem_usage": null,
            "attn_implementation": null,
            "cache_implementation": null,
            "autocast_enabled": false,
            "autocast_dtype": null,
            "torch_compile": false,
            "torch_compile_target": "forward",
            "torch_compile_config": {
                "backend": "inductor",
                "mode": "reduce-overhead",
                "fullgraph": true
            },
            "quantization_scheme": null,
            "quantization_config": {},
            "deepspeed_inference": false,
            "deepspeed_inference_config": {},
            "peft_type": null,
            "peft_config": {}
        },
        "scenario": {
            "name": "inference",
            "_target_": "optimum_benchmark.scenarios.inference.scenario.InferenceScenario",
            "iterations": 2,
            "duration": 0,
            "warmup_runs": 10,
            "input_shapes": {
                "batch_size": 1,
                "num_choices": 2,
                "sequence_length": 7
            },
            "new_tokens": null,
            "memory": true,
            "latency": true,
            "energy": false,
            "forward_kwargs": {},
            "generate_kwargs": {
                "max_new_tokens": 128,
                "min_new_tokens": 128,
                "do_sample": false
            },
            "call_kwargs": {}
        },
        "launcher": {
            "name": "process",
            "_target_": "optimum_benchmark.launchers.process.launcher.ProcessLauncher",
            "device_isolation": true,
            "device_isolation_action": "warn",
            "numactl": false,
            "numactl_kwargs": {},
            "start_method": "spawn"
        },
        "environment": {
            "cpu": " AMD EPYC 7R32",
            "cpu_count": 16,
            "cpu_ram_mb": 66697.261056,
            "system": "Linux",
            "machine": "x86_64",
            "platform": "Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.29",
            "processor": "x86_64",
            "python_version": "3.8.10",
            "gpu": [
                "NVIDIA A10G"
            ],
            "gpu_count": 1,
            "gpu_vram_mb": 24146608128,
            "optimum_benchmark_version": "0.4.0",
            "optimum_benchmark_commit": null,
            "transformers_version": "4.45.0.dev0",
            "transformers_commit": "66bc4def9505fa7c7fe4aa7a248c34a026bb552b",
            "accelerate_version": "0.35.0.dev0",
            "accelerate_commit": null,
            "diffusers_version": null,
            "diffusers_commit": null,
            "optimum_version": "1.22.0.dev0",
            "optimum_commit": null,
            "timm_version": "0.9.16",
            "timm_commit": null,
            "peft_version": "0.12.1.dev0",
            "peft_commit": null
        }
    },
    "report": {
        "load": {
            "memory": {
                "unit": "MB",
                "max_ram": 1336.111104,
                "max_global_vram": 6775.373824,
                "max_process_vram": 0.0,
                "max_reserved": 6138.363904,
                "max_allocated": 6060.931072
            },
            "latency": {
                "unit": "s",
                "count": 1,
                "total": 11.5026650390625,
                "mean": 11.5026650390625,
                "stdev": 0.0,
                "p50": 11.5026650390625,
                "p90": 11.5026650390625,
                "p95": 11.5026650390625,
                "p99": 11.5026650390625,
                "values": [
                    11.5026650390625
                ]
            },
            "throughput": null,
            "energy": null,
            "efficiency": null
        },
        "prefill": {
            "memory": {
                "unit": "MB",
                "max_ram": 1781.06368,
                "max_global_vram": 6796.345344,
                "max_process_vram": 0.0,
                "max_reserved": 6142.558208,
                "max_allocated": 5028.431872
            },
            "latency": {
                "unit": "s",
                "count": 2,
                "total": 0.04159145545959472,
                "mean": 0.02079572772979736,
                "stdev": 9.697628021240223e-05,
                "p50": 0.02079572772979736,
                "p90": 0.020873308753967285,
                "p95": 0.020883006381988527,
                "p99": 0.02089076448440552,
                "values": [
                    0.02069875144958496,
                    0.020892704010009765
                ]
            },
            "throughput": {
                "unit": "tokens/s",
                "value": 336.6075999336143
            },
            "energy": null,
            "efficiency": null
        },
        "decode": {
            "memory": {
                "unit": "MB",
                "max_ram": 1839.099904,
                "max_global_vram": 6800.539648,
                "max_process_vram": 0.0,
                "max_reserved": 6146.752512,
                "max_allocated": 5031.801344
            },
            "latency": {
                "unit": "s",
                "count": 2,
                "total": 5.098266357421875,
                "mean": 2.5491331787109375,
                "stdev": 0.0021544189453126528,
                "p50": 2.5491331787109375,
                "p90": 2.5508567138671876,
                "p95": 2.5510721557617186,
                "p99": 2.551244509277344,
                "values": [
                    2.55128759765625,
                    2.546978759765625
                ]
            },
            "throughput": {
                "unit": "tokens/s",
                "value": 49.82085716848352
            },
            "energy": null,
            "efficiency": null
        },
        "per_token": {
            "memory": null,
            "latency": {
                "unit": "s",
                "count": 254,
                "total": 5.0979501895904535,
                "mean": 0.020070670037757695,
                "stdev": 0.0005546999471228763,
                "p50": 0.020323840141296387,
                "p90": 0.020567039489746093,
                "p95": 0.020685157680511473,
                "p99": 0.021063240242004393,
                "values": [
                    0.01923583984375,
                    0.01922867202758789,
                    0.019375104904174805,
                    0.01923072052001953,
                    0.01922150421142578,
                    0.01925017547607422,
                    0.019321855545043946,
                    0.01922355270385742,
                    0.0192174072265625,
                    0.019549184799194336,
                    0.019305471420288087,
                    0.01921843147277832,
                    0.019336191177368164,
                    0.01923072052001953,
                    0.01921331214904785,
                    0.019183616638183593,
                    0.01927475166320801,
                    0.01942937660217285,
                    0.019418111801147463,
                    0.020405248641967775,
                    0.021134336471557616,
                    0.020541439056396483,
                    0.020435968399047853,
                    0.020471807479858398,
                    0.02048307228088379,
                    0.020382720947265624,
                    0.020351999282836913,
                    0.02046566390991211,
                    0.020255775451660157,
                    0.02028028869628906,
                    0.020477951049804686,
                    0.02031718444824219,
                    0.02027724838256836,
                    0.01963315200805664,
                    0.020419584274291993,
                    0.01941196823120117,
                    0.01927577590942383,
                    0.01923686408996582,
                    0.020345855712890625,
                    0.020462591171264647,
                    0.02026905632019043,
                    0.020341760635375978,
                    0.02032640075683594,
                    0.02026393508911133,
                    0.020351999282836913,
                    0.020331520080566406,
                    0.020495359420776366,
                    0.020368383407592772,
                    0.020303871154785155,
                    0.020370431900024414,
                    0.02049126434326172,
                    0.019281919479370118,
                    0.01925324821472168,
                    0.01922764778137207,
                    0.019663871765136717,
                    0.020282367706298828,
                    0.020264959335327147,
                    0.02045030403137207,
                    0.020601856231689454,
                    0.02044927978515625,
                    0.02043801689147949,
                    0.020528127670288086,
                    0.019351551055908203,
                    0.019499008178710937,
                    0.019273759841918946,
                    0.019324895858764648,
                    0.019406848907470704,
                    0.019571712493896484,
                    0.020299776077270508,
                    0.02043391990661621,
                    0.0194652156829834,
                    0.020460544586181642,
                    0.02060697555541992,
                    0.020462591171264647,
                    0.02079641532897949,
                    0.02057318305969238,
                    0.020576255798339844,
                    0.02046566390991211,
                    0.01945088005065918,
                    0.019513343811035155,
                    0.01942323112487793,
                    0.019405824661254883,
                    0.020529151916503906,
                    0.02050662422180176,
                    0.02048307228088379,
                    0.020486143112182616,
                    0.01944063949584961,
                    0.019501056671142578,
                    0.019341312408447265,
                    0.02002841567993164,
                    0.02041651153564453,
                    0.02047590446472168,
                    0.02048102378845215,
                    0.019893247604370116,
                    0.020666368484497072,
                    0.02050764846801758,
                    0.02020249557495117,
                    0.019389440536499023,
                    0.019361791610717775,
                    0.02088652801513672,
                    0.020502527236938475,
                    0.021000192642211913,
                    0.02162073516845703,
                    0.020730880737304686,
                    0.020582399368286132,
                    0.020549631118774413,
                    0.02044108772277832,
                    0.02044825553894043,
                    0.02042367935180664,
                    0.020410367965698242,
                    0.020567039489746093,
                    0.020477951049804686,
                    0.020411392211914063,
                    0.020370431900024414,
                    0.02039910316467285,
                    0.02035916709899902,
                    0.02051481628417969,
                    0.02049955177307129,
                    0.020391839981079102,
                    0.02042265510559082,
                    0.02046976089477539,
                    0.02045337677001953,
                    0.020392959594726562,
                    0.020380672454833985,
                    0.020534271240234374,
                    0.02044108772277832,
                    0.020487167358398437,
                    0.019173376083374022,
                    0.019165184020996092,
                    0.019153919219970703,
                    0.019384319305419923,
                    0.01920102310180664,
                    0.019165184020996092,
                    0.01927475166320801,
                    0.01921331214904785,
                    0.019188735961914064,
                    0.01944883155822754,
                    0.01924710464477539,
                    0.01925939178466797,
                    0.01924198341369629,
                    0.019314687728881837,
                    0.01925324821472168,
                    0.019396608352661132,
                    0.019151872634887695,
                    0.020371456146240235,
                    0.020398080825805662,
                    0.020346879959106445,
                    0.02046566390991211,
                    0.020246528625488282,
                    0.020222976684570314,
                    0.020321279525756835,
                    0.02028441619873047,
                    0.02025267219543457,
                    0.020299776077270508,
                    0.020197376251220703,
                    0.0202608642578125,
                    0.020321279525756835,
                    0.020376575469970702,
                    0.02026905632019043,
                    0.020419584274291993,
                    0.0202608642578125,
                    0.02027622413635254,
                    0.01925017547607422,
                    0.019155967712402345,
                    0.019779584884643556,
                    0.02024448013305664,
                    0.020230144500732423,
                    0.020281343460083007,
                    0.01946419143676758,
                    0.020249599456787108,
                    0.01924198341369629,
                    0.020114431381225584,
                    0.01990656089782715,
                    0.020479999542236327,
                    0.02028544044494629,
                    0.02022809600830078,
                    0.020287488937377928,
                    0.020311040878295897,
                    0.02024448013305664,
                    0.01924095916748047,
                    0.019172351837158205,
                    0.019212287902832033,
                    0.01924198341369629,
                    0.019174400329589843,
                    0.01944268798828125,
                    0.01949388885498047,
                    0.019331071853637697,
                    0.01943552017211914,
                    0.019300352096557616,
                    0.019346431732177736,
                    0.020231168746948244,
                    0.020528127670288086,
                    0.020892671585083008,
                    0.020684799194335936,
                    0.020750335693359375,
                    0.020797439575195312,
                    0.020512767791748047,
                    0.020462591171264647,
                    0.02041753578186035,
                    0.020808704376220705,
                    0.020107263565063475,
                    0.020176895141601564,
                    0.021357568740844726,
                    0.02046463966369629,
                    0.02037555122375488,
                    0.020567039489746093,
                    0.02067251205444336,
                    0.020493312835693358,
                    0.02043801689147949,
                    0.020468736648559572,
                    0.02046668815612793,
                    0.01943142318725586,
                    0.02039193534851074,
                    0.02046463966369629,
                    0.020426752090454102,
                    0.02050048065185547,
                    0.02046771240234375,
                    0.02066739273071289,
                    0.020541439056396483,
                    0.02040934371948242,
                    0.020506656646728516,
                    0.020459487915039064,
                    0.020578304290771485,
                    0.02045747184753418,
                    0.02043391990661621,
                    0.02045337677001953,
                    0.020567039489746093,
                    0.020752384185791017,
                    0.0204769287109375,
                    0.020398080825805662,
                    0.019412992477416992,
                    0.019372032165527343,
                    0.019393535614013673,
                    0.020091903686523437,
                    0.02041651153564453,
                    0.02042367935180664,
                    0.020489215850830078,
                    0.01944883155822754,
                    0.019794944763183595,
                    0.020487167358398437,
                    0.020494335174560546,
                    0.02040012741088867,
                    0.02039091110229492,
                    0.020539392471313478,
                    0.020644863128662108,
                    0.020380672454833985,
                    0.020685823440551757,
                    0.02060492706298828,
                    0.02048409652709961,
                    0.01941196823120117,
                    0.019504127502441407,
                    0.019511295318603517,
                    0.019314687728881837,
                    0.020092927932739257
                ]
            },
            "throughput": {
                "unit": "tokens/s",
                "value": 49.82394698925161
            },
            "energy": null,
            "efficiency": null
        }
    }
}