{
    "config": {
        "name": "pytorch_generate",
        "backend": {
            "name": "pytorch",
            "version": "2.4.0+cu121",
            "_target_": "optimum_benchmark.backends.pytorch.backend.PyTorchBackend",
            "task": "text-generation",
            "library": "transformers",
            "model_type": "gemma",
            "model": "google/gemma-2b",
            "processor": "google/gemma-2b",
            "device": "cuda",
            "device_ids": "0",
            "seed": 42,
            "inter_op_num_threads": null,
            "intra_op_num_threads": null,
            "model_kwargs": {},
            "processor_kwargs": {},
            "no_weights": true,
            "device_map": null,
            "torch_dtype": "float16",
            "eval_mode": true,
            "to_bettertransformer": false,
            "low_cpu_mem_usage": null,
            "attn_implementation": null,
            "cache_implementation": "static",
            "autocast_enabled": false,
            "autocast_dtype": null,
            "torch_compile": false,
            "torch_compile_target": "forward",
            "torch_compile_config": {
                "backend": "inductor",
                "mode": "reduce-overhead",
                "fullgraph": true
            },
            "quantization_scheme": null,
            "quantization_config": {},
            "deepspeed_inference": false,
            "deepspeed_inference_config": {},
            "peft_type": null,
            "peft_config": {}
        },
        "scenario": {
            "name": "inference",
            "_target_": "optimum_benchmark.scenarios.inference.scenario.InferenceScenario",
            "iterations": 2,
            "duration": 0,
            "warmup_runs": 10,
            "input_shapes": {
                "batch_size": 1,
                "num_choices": 2,
                "sequence_length": 7
            },
            "new_tokens": null,
            "memory": true,
            "latency": true,
            "energy": false,
            "forward_kwargs": {},
            "generate_kwargs": {
                "max_new_tokens": 128,
                "min_new_tokens": 128,
                "do_sample": false
            },
            "call_kwargs": {}
        },
        "launcher": {
            "name": "process",
            "_target_": "optimum_benchmark.launchers.process.launcher.ProcessLauncher",
            "device_isolation": true,
            "device_isolation_action": "warn",
            "numactl": false,
            "numactl_kwargs": {},
            "start_method": "spawn"
        },
        "environment": {
            "cpu": " AMD EPYC 7R32",
            "cpu_count": 16,
            "cpu_ram_mb": 66697.261056,
            "system": "Linux",
            "machine": "x86_64",
            "platform": "Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.29",
            "processor": "x86_64",
            "python_version": "3.8.10",
            "gpu": [
                "NVIDIA A10G"
            ],
            "gpu_count": 1,
            "gpu_vram_mb": 24146608128,
            "optimum_benchmark_version": "0.4.0",
            "optimum_benchmark_commit": null,
            "transformers_version": "4.45.0.dev0",
            "transformers_commit": "66bc4def9505fa7c7fe4aa7a248c34a026bb552b",
            "accelerate_version": "0.35.0.dev0",
            "accelerate_commit": null,
            "diffusers_version": null,
            "diffusers_commit": null,
            "optimum_version": "1.22.0.dev0",
            "optimum_commit": null,
            "timm_version": "0.9.16",
            "timm_commit": null,
            "peft_version": "0.12.1.dev0",
            "peft_commit": null
        }
    },
    "report": {
        "load": {
            "memory": {
                "unit": "MB",
                "max_ram": 1335.853056,
                "max_global_vram": 6775.373824,
                "max_process_vram": 0.0,
                "max_reserved": 6138.363904,
                "max_allocated": 6060.931072
            },
            "latency": {
                "unit": "s",
                "count": 1,
                "total": 11.5549052734375,
                "mean": 11.5549052734375,
                "stdev": 0.0,
                "p50": 11.5549052734375,
                "p90": 11.5549052734375,
                "p95": 11.5549052734375,
                "p99": 11.5549052734375,
                "values": [
                    11.5549052734375
                ]
            },
            "throughput": null,
            "energy": null,
            "efficiency": null
        },
        "prefill": {
            "memory": {
                "unit": "MB",
                "max_ram": 1815.908352,
                "max_global_vram": 6796.345344,
                "max_process_vram": 0.0,
                "max_reserved": 6142.558208,
                "max_allocated": 5028.450816
            },
            "latency": {
                "unit": "s",
                "count": 2,
                "total": 0.04465740776062012,
                "mean": 0.02232870388031006,
                "stdev": 4.7072410583496455e-05,
                "p50": 0.02232870388031006,
                "p90": 0.022366361808776856,
                "p95": 0.022371069049835206,
                "p99": 0.022374834842681886,
                "values": [
                    0.022281631469726563,
                    0.022375776290893556
                ]
            },
            "throughput": {
                "unit": "tokens/s",
                "value": 313.4978204522097
            },
            "energy": null,
            "efficiency": null
        },
        "decode": {
            "memory": {
                "unit": "MB",
                "max_ram": 1842.540544,
                "max_global_vram": 6800.539648,
                "max_process_vram": 0.0,
                "max_reserved": 6146.752512,
                "max_allocated": 5031.820288
            },
            "latency": {
                "unit": "s",
                "count": 2,
                "total": 5.27243017578125,
                "mean": 2.636215087890625,
                "stdev": 0.004873291015624837,
                "p50": 2.636215087890625,
                "p90": 2.6401137207031247,
                "p95": 2.6406010498046872,
                "p99": 2.6409909130859375,
                "values": [
                    2.631341796875,
                    2.64108837890625
                ]
            },
            "throughput": {
                "unit": "tokens/s",
                "value": 48.175128267557035
            },
            "energy": null,
            "efficiency": null
        },
        "per_token": {
            "memory": null,
            "latency": {
                "unit": "s",
                "count": 254,
                "total": 5.272176639556889,
                "mean": 0.020756600943137343,
                "stdev": 0.0003286462326815648,
                "p50": 0.020604415893554686,
                "p90": 0.021191167449951174,
                "p95": 0.0212695556640625,
                "p99": 0.021576816596984863,
                "values": [
                    0.020455423355102538,
                    0.020501504898071288,
                    0.020504575729370117,
                    0.020818944931030273,
                    0.020566015243530272,
                    0.020714496612548827,
                    0.021146623611450196,
                    0.020980735778808594,
                    0.020758527755737305,
                    0.020403200149536133,
                    0.020913152694702147,
                    0.02104217529296875,
                    0.021230592727661132,
                    0.02109337615966797,
                    0.020504575729370117,
                    0.020649984359741212,
                    0.020347904205322266,
                    0.021638143539428712,
                    0.020568063735961914,
                    0.020503551483154296,
                    0.02122035217285156,
                    0.020998144149780275,
                    0.020823040008544923,
                    0.02049126434326172,
                    0.0204902400970459,
                    0.02109132766723633,
                    0.021209087371826172,
                    0.02048204803466797,
                    0.020555776596069338,
                    0.020548639297485353,
                    0.0204564151763916,
                    0.02043187141418457,
                    0.02049843215942383,
                    0.020509695053100584,
                    0.02068377685546875,
                    0.020497407913208008,
                    0.02046976089477539,
                    0.02048307228088379,
                    0.02045952033996582,
                    0.020756479263305663,
                    0.0210513916015625,
                    0.020501504898071288,
                    0.020485120773315428,
                    0.02025574493408203,
                    0.020546560287475587,
                    0.020682752609252928,
                    0.021238784790039062,
                    0.020538368225097657,
                    0.020516864776611327,
                    0.020412416458129884,
                    0.020497407913208008,
                    0.020447231292724608,
                    0.020470783233642577,
                    0.02045337677001953,
                    0.020552703857421875,
                    0.020542463302612304,
                    0.02049945640563965,
                    0.020477951049804686,
                    0.020444160461425782,
                    0.02045337677001953,
                    0.02047488021850586,
                    0.020518911361694335,
                    0.020462591171264647,
                    0.02045644760131836,
                    0.02044313621520996,
                    0.020496383666992187,
                    0.020586496353149415,
                    0.020479999542236327,
                    0.020454399108886717,
                    0.02045952033996582,
                    0.020530176162719727,
                    0.020753408432006838,
                    0.021046272277832033,
                    0.02057318305969238,
                    0.020509695053100584,
                    0.020454399108886717,
                    0.020477951049804686,
                    0.0206561279296875,
                    0.02191974449157715,
                    0.02123776054382324,
                    0.021226495742797852,
                    0.020554752349853517,
                    0.020512767791748047,
                    0.020509695053100584,
                    0.020497407913208008,
                    0.02066022491455078,
                    0.02086297607421875,
                    0.02084966468811035,
                    0.020769792556762694,
                    0.020612096786499022,
                    0.02050048065185547,
                    0.022544384002685547,
                    0.02130534362792969,
                    0.02128486442565918,
                    0.02129715156555176,
                    0.02109132766723633,
                    0.020487167358398437,
                    0.020790271759033203,
                    0.020726783752441406,
                    0.020520959854125977,
                    0.02130227279663086,
                    0.02107494354248047,
                    0.02106879997253418,
                    0.021160959243774414,
                    0.021374975204467773,
                    0.02103091239929199,
                    0.020702207565307617,
                    0.021172224044799806,
                    0.020479999542236327,
                    0.02045952033996582,
                    0.020470783233642577,
                    0.02107596778869629,
                    0.02064896011352539,
                    0.0204902400970459,
                    0.02065715217590332,
                    0.02047385597229004,
                    0.020505599975585938,
                    0.020625408172607423,
                    0.02110361671447754,
                    0.020706304550170897,
                    0.020571136474609376,
                    0.02081996726989746,
                    0.020883455276489257,
                    0.020477951049804686,
                    0.020832256317138673,
                    0.020441120147705077,
                    0.020441055297851562,
                    0.021038080215454103,
                    0.020426752090454102,
                    0.020387840270996094,
                    0.020996095657348633,
                    0.02102272033691406,
                    0.021006336212158205,
                    0.020972543716430665,
                    0.021003263473510742,
                    0.02103910446166992,
                    0.021121023178100586,
                    0.02045132827758789,
                    0.02044108772277832,
                    0.0204902400970459,
                    0.020715520858764647,
                    0.021384191513061524,
                    0.02060492706298828,
                    0.02104832077026367,
                    0.021013504028320314,
                    0.020603904724121092,
                    0.020531200408935548,
                    0.02046976089477539,
                    0.02048409652709961,
                    0.020587520599365236,
                    0.0212992000579834,
                    0.02110361671447754,
                    0.020979711532592774,
                    0.021161983489990235,
                    0.021028863906860353,
                    0.020999168395996092,
                    0.020963327407836914,
                    0.020505599975585938,
                    0.020582399368286132,
                    0.02051481628417969,
                    0.020497407913208008,
                    0.02050662422180176,
                    0.0204421443939209,
                    0.020803552627563476,
                    0.02047283172607422,
                    0.020625408172607423,
                    0.02046156883239746,
                    0.020463615417480468,
                    0.020520959854125977,
                    0.020525056838989256,
                    0.021206016540527343,
                    0.020724735260009765,
                    0.020535295486450195,
                    0.020551679611206054,
                    0.020471807479858398,
                    0.02068172836303711,
                    0.020447231292724608,
                    0.021012479782104493,
                    0.020976640701293944,
                    0.020468736648559572,
                    0.020715520858764647,
                    0.02045747184753418,
                    0.02103398323059082,
                    0.021133312225341795,
                    0.0210882568359375,
                    0.021227519989013673,
                    0.020669504165649413,
                    0.020489152908325196,
                    0.020496383666992187,
                    0.02125209617614746,
                    0.02102783966064453,
                    0.021242879867553712,
                    0.021021696090698243,
                    0.020549631118774413,
                    0.020504575729370117,
                    0.020997119903564454,
                    0.020471807479858398,
                    0.0210513916015625,
                    0.021167104721069335,
                    0.020485120773315428,
                    0.020502527236938475,
                    0.020454399108886717,
                    0.02048102378845215,
                    0.02045849609375,
                    0.02109337615966797,
                    0.02106060791015625,
                    0.02101759910583496,
                    0.02102374458312988,
                    0.020288511276245116,
                    0.020570112228393556,
                    0.020788223266601562,
                    0.020486143112182616,
                    0.020463615417480468,
                    0.020463615417480468,
                    0.020593664169311524,
                    0.02049126434326172,
                    0.020478975296020507,
                    0.021117952346801756,
                    0.02105241584777832,
                    0.02128691291809082,
                    0.021522432327270507,
                    0.02126131248474121,
                    0.021045248031616212,
                    0.020929536819458007,
                    0.021187583923339845,
                    0.02044927978515625,
                    0.020477951049804686,
                    0.02047385597229004,
                    0.020434944152832032,
                    0.020495359420776366,
                    0.02066022491455078,
                    0.021073919296264648,
                    0.02107904052734375,
                    0.02102374458312988,
                    0.021003263473510742,
                    0.0210565128326416,
                    0.02044825553894043,
                    0.021192703247070312,
                    0.021324800491333007,
                    0.0211015682220459,
                    0.02104729652404785,
                    0.021086208343505858,
                    0.02108415985107422,
                    0.02082918357849121,
                    0.021124095916748048,
                    0.021194751739501954,
                    0.021169151306152344,
                    0.020539392471313478,
                    0.020509695053100584,
                    0.02047590446472168,
                    0.020968448638916014,
                    0.02109542465209961,
                    0.0204769287109375,
                    0.02049126434326172
                ]
            },
            "throughput": {
                "unit": "tokens/s",
                "value": 48.17744498434486
            },
            "energy": null,
            "efficiency": null
        }
    }
}