{
  "config": {
    "name": "pytorch_generate",
    "backend": {
      "name": "pytorch",
      "version": "2.4.0+cu121",
      "_target_": "optimum_benchmark.backends.pytorch.backend.PyTorchBackend",
      "task": "text-generation",
      "library": "transformers",
      "model_type": "gemma",
      "model": "google/gemma-2b",
      "processor": "google/gemma-2b",
      "device": "cuda",
      "device_ids": "0",
      "seed": 42,
      "inter_op_num_threads": null,
      "intra_op_num_threads": null,
      "model_kwargs": {},
      "processor_kwargs": {},
      "no_weights": true,
      "device_map": null,
      "torch_dtype": "float16",
      "eval_mode": true,
      "to_bettertransformer": false,
      "low_cpu_mem_usage": null,
      "attn_implementation": null,
      "cache_implementation": "static",
      "autocast_enabled": false,
      "autocast_dtype": null,
      "torch_compile": false,
      "torch_compile_target": "forward",
      "torch_compile_config": {
        "backend": "inductor",
        "mode": "reduce-overhead",
        "fullgraph": true
      },
      "quantization_scheme": null,
      "quantization_config": {},
      "deepspeed_inference": false,
      "deepspeed_inference_config": {},
      "peft_type": null,
      "peft_config": {}
    },
    "scenario": {
      "name": "inference",
      "_target_": "optimum_benchmark.scenarios.inference.scenario.InferenceScenario",
      "iterations": 2,
      "duration": 0,
      "warmup_runs": 10,
      "input_shapes": {
        "batch_size": 1,
        "num_choices": 2,
        "sequence_length": 7
      },
      "new_tokens": null,
      "memory": true,
      "latency": true,
      "energy": false,
      "forward_kwargs": {},
      "generate_kwargs": {
        "max_new_tokens": 128,
        "min_new_tokens": 128,
        "do_sample": false
      },
      "call_kwargs": {}
    },
    "launcher": {
      "name": "process",
      "_target_": "optimum_benchmark.launchers.process.launcher.ProcessLauncher",
      "device_isolation": true,
      "device_isolation_action": "warn",
      "numactl": false,
      "numactl_kwargs": {},
      "start_method": "spawn"
    },
    "environment": {
      "cpu": " AMD EPYC 7R32",
      "cpu_count": 16,
      "cpu_ram_mb": 66697.252864,
      "system": "Linux",
      "machine": "x86_64",
      "platform": "Linux-5.10.223-211.872.amzn2.x86_64-x86_64-with-glibc2.29",
      "processor": "x86_64",
      "python_version": "3.8.10",
      "gpu": [
        "NVIDIA A10G"
      ],
      "gpu_count": 1,
      "gpu_vram_mb": 24146608128,
      "optimum_benchmark_version": "0.4.0",
      "optimum_benchmark_commit": null,
      "transformers_version": "4.45.0.dev0",
      "transformers_commit": "0a7af19f4dc868bafc82f35eb7e8d13bac87a594",
      "accelerate_version": "0.34.0.dev0",
      "accelerate_commit": null,
      "diffusers_version": null,
      "diffusers_commit": null,
      "optimum_version": "1.22.0.dev0",
      "optimum_commit": null,
      "timm_version": "0.9.16",
      "timm_commit": null,
      "peft_version": "0.12.1.dev0",
      "peft_commit": null
    }
  },
  "report": {
    "load": {
      "memory": {
        "unit": "MB",
        "max_ram": 1319.452672,
        "max_global_vram": 6768.033792,
        "max_process_vram": 0.0,
        "max_reserved": 6138.363904,
        "max_allocated": 6060.931072
      },
      "latency": {
        "unit": "s",
        "count": 1,
        "total": 12.7888544921875,
        "mean": 12.7888544921875,
        "stdev": 0.0,
        "p50": 12.7888544921875,
        "p90": 12.7888544921875,
        "p95": 12.7888544921875,
        "p99": 12.7888544921875,
        "values": [
          12.7888544921875
        ]
      },
      "throughput": null,
      "energy": null,
      "efficiency": null
    },
    "prefill": {
      "memory": {
        "unit": "MB",
        "max_ram": 1747.80416,
        "max_global_vram": 6789.005312,
        "max_process_vram": 0.0,
        "max_reserved": 6142.558208,
        "max_allocated": 5028.450816
      },
      "latency": {
        "unit": "s",
        "count": 2,
        "total": 0.04456255912780761,
        "mean": 0.022281279563903807,
        "stdev": 0.00045068836212158216,
        "p50": 0.022281279563903807,
        "p90": 0.022641830253601076,
        "p95": 0.022686899089813233,
        "p99": 0.02272295415878296,
        "values": [
          0.021830591201782226,
          0.02273196792602539
        ]
      },
      "throughput": {
        "unit": "tokens/s",
        "value": 314.16508104589127
      },
      "energy": null,
      "efficiency": null
    },
    "decode": {
      "memory": {
        "unit": "MB",
        "max_ram": 1772.556288,
        "max_global_vram": 6793.199616,
        "max_process_vram": 0.0,
        "max_reserved": 6146.752512,
        "max_allocated": 5031.820288
      },
      "latency": {
        "unit": "s",
        "count": 2,
        "total": 5.178685302734375,
        "mean": 2.5893426513671876,
        "stdev": 0.009314086914062436,
        "p50": 2.5893426513671876,
        "p90": 2.5967939208984374,
        "p95": 2.597725329589844,
        "p99": 2.5984704565429686,
        "values": [
          2.580028564453125,
          2.59865673828125
        ]
      },
      "throughput": {
        "unit": "tokens/s",
        "value": 49.047197339040196
      },
      "energy": null,
      "efficiency": null
    },
    "per_token": {
      "memory": null,
      "latency": {
        "unit": "s",
        "count": 254,
        "total": 5.178421249389649,
        "mean": 0.020387485233817514,
        "stdev": 0.0003491871655363828,
        "p50": 0.02017638397216797,
        "p90": 0.020863693046569827,
        "p95": 0.020892671585083008,
        "p99": 0.02104976406097412,
        "values": [
          0.02024345588684082,
          0.02021683120727539,
          0.02039193534851074,
          0.020117504119873047,
          0.019938304901123048,
          0.020144128799438478,
          0.020151296615600587,
          0.020208639144897463,
          0.020163583755493163,
          0.02020966339111328,
          0.020552703857421875,
          0.02150092887878418,
          0.021191680908203125,
          0.021037055969238282,
          0.02102783966064453,
          0.020616191864013672,
          0.02081996726989746,
          0.02085068893432617,
          0.020777984619140624,
          0.020523008346557618,
          0.02084864044189453,
          0.02086911964416504,
          0.02085068893432617,
          0.02082713508605957,
          0.02085785675048828,
          0.019923967361450197,
          0.020149248123168945,
          0.020102144241333008,
          0.020125696182250977,
          0.019957759857177734,
          0.020107263565063475,
          0.020168703079223634,
          0.020167680740356447,
          0.020134912490844727,
          0.020131839752197265,
          0.02011136054992676,
          0.020161535263061522,
          0.02007040023803711,
          0.020141056060791016,
          0.020113407135009767,
          0.01997724723815918,
          0.020070367813110352,
          0.020105215072631837,
          0.020009983062744142,
          0.01997209548950195,
          0.019926015853881835,
          0.020132863998413086,
          0.020170751571655272,
          0.020183040618896485,
          0.020160512924194338,
          0.020179967880249023,
          0.020092927932739257,
          0.020137983322143553,
          0.02012057685852051,
          0.020150272369384766,
          0.02011136054992676,
          0.020178943634033202,
          0.020157440185546875,
          0.020148223876953125,
          0.020129791259765627,
          0.02021683120727539,
          0.01992192077636719,
          0.019926015853881835,
          0.020031488418579102,
          0.020108287811279296,
          0.020180992126464844,
          0.020145151138305666,
          0.020176895141601564,
          0.020126720428466797,
          0.020130815505981444,
          0.02012774467468262,
          0.020145151138305666,
          0.020180992126464844,
          0.02019327926635742,
          0.02020966339111328,
          0.020132863998413086,
          0.020116479873657226,
          0.02012876892089844,
          0.020145151138305666,
          0.020137983322143553,
          0.02017791938781738,
          0.020131839752197265,
          0.020183040618896485,
          0.020174848556518556,
          0.020133888244628906,
          0.020113407135009767,
          0.02021171188354492,
          0.020156415939331054,
          0.020114431381225584,
          0.020147199630737304,
          0.020147199630737304,
          0.020176895141601564,
          0.02012057685852051,
          0.020125696182250977,
          0.020106239318847655,
          0.02012057685852051,
          0.02008678436279297,
          0.020122623443603514,
          0.020172800064086914,
          0.020141056060791016,
          0.020132863998413086,
          0.020200447082519533,
          0.020167680740356447,
          0.020126720428466797,
          0.020108287811279296,
          0.020107263565063475,
          0.020195327758789062,
          0.02012057685852051,
          0.020148223876953125,
          0.020124671936035156,
          0.020755456924438476,
          0.0210513916015625,
          0.02084556770324707,
          0.02087936019897461,
          0.02082815933227539,
          0.020855808258056642,
          0.02087833595275879,
          0.020824064254760744,
          0.02085273551940918,
          0.020867071151733398,
          0.020892671585083008,
          0.020783103942871094,
          0.02079539108276367,
          0.020768768310546876,
          0.020892671585083008,
          0.020817920684814452,
          0.020909055709838868,
          0.020939775466918945,
          0.020780031204223632,
          0.02082713508605957,
          0.020816896438598635,
          0.020802560806274413,
          0.020779008865356444,
          0.020749311447143554,
          0.020839424133300782,
          0.020809791564941407,
          0.020801471710205077,
          0.02084351921081543,
          0.020794368743896483,
          0.0208353271484375,
          0.020785152435302736,
          0.02046976089477539,
          0.020398080825805662,
          0.02043187141418457,
          0.020709375381469726,
          0.02082611274719238,
          0.02083430480957031,
          0.0208353271484375,
          0.020839424133300782,
          0.020822015762329102,
          0.020907007217407226,
          0.020779008865356444,
          0.020801536560058592,
          0.02084556770324707,
          0.020864000320434572,
          0.020831232070922853,
          0.02083737564086914,
          0.020823040008544923,
          0.02012057685852051,
          0.02012876892089844,
          0.02019430351257324,
          0.020122623443603514,
          0.020175872802734376,
          0.020157440185546875,
          0.020122623443603514,
          0.020153343200683595,
          0.020143104553222657,
          0.020172800064086914,
          0.019934207916259765,
          0.019961856842041017,
          0.02024140739440918,
          0.02011238479614258,
          0.019984384536743165,
          0.020092927932739257,
          0.020144128799438478,
          0.020153343200683595,
          0.020109312057495117,
          0.020136959075927736,
          0.020143104553222657,
          0.020150272369384766,
          0.020140031814575195,
          0.020144128799438478,
          0.020133888244628906,
          0.020134912490844727,
          0.020188159942626953,
          0.020180992126464844,
          0.020174848556518556,
          0.020179967880249023,
          0.020142080307006836,
          0.020134912490844727,
          0.02011136054992676,
          0.020160512924194338,
          0.020145151138305666,
          0.020247552871704103,
          0.020158464431762696,
          0.02019327926635742,
          0.020167680740356447,
          0.020182016372680665,
          0.020167680740356447,
          0.020195327758789062,
          0.020143104553222657,
          0.020129791259765627,
          0.020153343200683595,
          0.020164608001708984,
          0.020160512924194338,
          0.020184064865112306,
          0.020165632247924805,
          0.02104832077026367,
          0.020969472885131835,
          0.02082099151611328,
          0.020876287460327148,
          0.02084556770324707,
          0.02085171127319336,
          0.02085171127319336,
          0.020766719818115235,
          0.020814847946166993,
          0.020797439575195312,
          0.02084249687194824,
          0.020822015762329102,
          0.02084864044189453,
          0.02086502456665039,
          0.0200581111907959,
          0.020023296356201172,
          0.020162559509277343,
          0.020133888244628906,
          0.020168703079223634,
          0.020206592559814454,
          0.020168703079223634,
          0.02012876892089844,
          0.020185087203979494,
          0.020143104553222657,
          0.020161535263061522,
          0.020167680740356447,
          0.020188159942626953,
          0.020137983322143553,
          0.020157440185546875,
          0.020222976684570314,
          0.020198400497436524,
          0.020102144241333008,
          0.02026803207397461,
          0.02088960075378418,
          0.020864000320434572,
          0.020840448379516603,
          0.02086297607421875,
          0.02089369583129883,
          0.020983808517456053,
          0.02088960075378418,
          0.020844575881958007,
          0.020847583770751955,
          0.02088755226135254,
          0.020839424133300782,
          0.020876287460327148,
          0.02085171127319336,
          0.02084966468811035
        ]
      },
      "throughput": {
        "unit": "tokens/s",
        "value": 49.049698309100975
      },
      "energy": null,
      "efficiency": null
    }
  }
}