{
    "config": {
        "name": "pytorch_generate",
        "backend": {
            "name": "pytorch",
            "version": "2.4.0+cu121",
            "_target_": "optimum_benchmark.backends.pytorch.backend.PyTorchBackend",
            "task": "text-generation",
            "library": "transformers",
            "model_type": "gemma",
            "model": "google/gemma-2b",
            "processor": "google/gemma-2b",
            "device": "cuda",
            "device_ids": "0",
            "seed": 42,
            "inter_op_num_threads": null,
            "intra_op_num_threads": null,
            "model_kwargs": {},
            "processor_kwargs": {},
            "no_weights": true,
            "device_map": null,
            "torch_dtype": "float16",
            "eval_mode": true,
            "to_bettertransformer": false,
            "low_cpu_mem_usage": null,
            "attn_implementation": null,
            "cache_implementation": "static",
            "autocast_enabled": false,
            "autocast_dtype": null,
            "torch_compile": true,
            "torch_compile_target": "forward",
            "torch_compile_config": {
                "backend": "inductor",
                "mode": "reduce-overhead",
                "fullgraph": true
            },
            "quantization_scheme": null,
            "quantization_config": {},
            "deepspeed_inference": false,
            "deepspeed_inference_config": {},
            "peft_type": null,
            "peft_config": {}
        },
        "scenario": {
            "name": "inference",
            "_target_": "optimum_benchmark.scenarios.inference.scenario.InferenceScenario",
            "iterations": 2,
            "duration": 0,
            "warmup_runs": 10,
            "input_shapes": {
                "batch_size": 1,
                "num_choices": 2,
                "sequence_length": 7
            },
            "new_tokens": null,
            "memory": true,
            "latency": true,
            "energy": false,
            "forward_kwargs": {},
            "generate_kwargs": {
                "max_new_tokens": 128,
                "min_new_tokens": 128,
                "do_sample": false
            },
            "call_kwargs": {}
        },
        "launcher": {
            "name": "process",
            "_target_": "optimum_benchmark.launchers.process.launcher.ProcessLauncher",
            "device_isolation": true,
            "device_isolation_action": "warn",
            "numactl": false,
            "numactl_kwargs": {},
            "start_method": "spawn"
        },
        "environment": {
            "cpu": " AMD EPYC 7R32",
            "cpu_count": 16,
            "cpu_ram_mb": 66697.261056,
            "system": "Linux",
            "machine": "x86_64",
            "platform": "Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.29",
            "processor": "x86_64",
            "python_version": "3.8.10",
            "gpu": [
                "NVIDIA A10G"
            ],
            "gpu_count": 1,
            "gpu_vram_mb": 24146608128,
            "optimum_benchmark_version": "0.4.0",
            "optimum_benchmark_commit": null,
            "transformers_version": "4.45.0.dev0",
            "transformers_commit": "66bc4def9505fa7c7fe4aa7a248c34a026bb552b",
            "accelerate_version": "0.35.0.dev0",
            "accelerate_commit": null,
            "diffusers_version": null,
            "diffusers_commit": null,
            "optimum_version": "1.22.0.dev0",
            "optimum_commit": null,
            "timm_version": "0.9.16",
            "timm_commit": null,
            "peft_version": "0.12.1.dev0",
            "peft_commit": null
        }
    },
    "report": {
        "load": {
            "memory": {
                "unit": "MB",
                "max_ram": 1353.756672,
                "max_global_vram": 6775.373824,
                "max_process_vram": 0.0,
                "max_reserved": 6138.363904,
                "max_allocated": 6060.931072
            },
            "latency": {
                "unit": "s",
                "count": 1,
                "total": 13.015529296875,
                "mean": 13.015529296875,
                "stdev": 0.0,
                "p50": 13.015529296875,
                "p90": 13.015529296875,
                "p95": 13.015529296875,
                "p99": 13.015529296875,
                "values": [
                    13.015529296875
                ]
            },
            "throughput": null,
            "energy": null,
            "efficiency": null
        },
        "prefill": {
            "memory": {
                "unit": "MB",
                "max_ram": 1925.742592,
                "max_global_vram": 6796.345344,
                "max_process_vram": 0.0,
                "max_reserved": 6138.363904,
                "max_allocated": 5278.596608
            },
            "latency": {
                "unit": "s",
                "count": 2,
                "total": 0.02835750389099121,
                "mean": 0.014178751945495605,
                "stdev": 0.0001983041763305663,
                "p50": 0.014178751945495605,
                "p90": 0.014337395286560058,
                "p95": 0.014357225704193115,
                "p99": 0.01437309003829956,
                "values": [
                    0.013980447769165039,
                    0.014377056121826172
                ]
            },
            "throughput": {
                "unit": "tokens/s",
                "value": 493.69648519902375
            },
            "energy": null,
            "efficiency": null
        },
        "decode": {
            "memory": {
                "unit": "MB",
                "max_ram": 2098.884608,
                "max_global_vram": 6060.244992,
                "max_process_vram": 0.0,
                "max_reserved": 5381.292032,
                "max_allocated": 5282.647552
            },
            "latency": {
                "unit": "s",
                "count": 2,
                "total": 3.13300537109375,
                "mean": 1.566502685546875,
                "stdev": 0.00012390136718753997,
                "p50": 1.566502685546875,
                "p90": 1.566601806640625,
                "p95": 1.5666141967773437,
                "p99": 1.5666241088867188,
                "values": [
                    1.5666265869140625,
                    1.5663787841796875
                ]
            },
            "throughput": {
                "unit": "tokens/s",
                "value": 81.07231552920292
            },
            "energy": null,
            "efficiency": null
        },
        "per_token": {
            "memory": null,
            "latency": {
                "unit": "s",
                "count": 254,
                "total": 3.1327047624588027,
                "mean": 0.012333483316766935,
                "stdev": 8.446177575398085e-05,
                "p50": 0.012312576293945313,
                "p90": 0.012358963108062743,
                "p95": 0.012455321550369262,
                "p99": 0.012812411012649535,
                "values": [
                    0.01248256015777588,
                    0.012323840141296387,
                    0.01229312038421631,
                    0.012313632011413575,
                    0.01231766414642334,
                    0.012319744110107422,
                    0.012317695617675782,
                    0.01233510398864746,
                    0.01232588768005371,
                    0.01232588768005371,
                    0.012973055839538575,
                    0.012379136085510254,
                    0.012317695617675782,
                    0.012305407524108887,
                    0.012314623832702636,
                    0.012315648078918457,
                    0.012308480262756348,
                    0.012316672325134278,
                    0.012310527801513671,
                    0.012319744110107422,
                    0.012320768356323243,
                    0.012297216415405274,
                    0.012305407524108887,
                    0.01234329605102539,
                    0.012332032203674317,
                    0.012310527801513671,
                    0.012311552047729492,
                    0.012309503555297852,
                    0.012303359985351562,
                    0.012306431770324706,
                    0.012324864387512208,
                    0.012306431770324706,
                    0.012313599586486817,
                    0.012304384231567383,
                    0.012310527801513671,
                    0.012300288200378418,
                    0.012319744110107422,
                    0.012326911926269531,
                    0.012312576293945313,
                    0.012305407524108887,
                    0.01237299156188965,
                    0.012305407524108887,
                    0.012302335739135742,
                    0.012308480262756348,
                    0.012322815895080566,
                    0.012316672325134278,
                    0.01258188819885254,
                    0.012339200019836426,
                    0.01245798397064209,
                    0.012301312446594239,
                    0.012309503555297852,
                    0.012309503555297852,
                    0.012300288200378418,
                    0.012313599586486817,
                    0.012309503555297852,
                    0.012314687728881836,
                    0.01235142421722412,
                    0.012315648078918457,
                    0.012317695617675782,
                    0.012301312446594239,
                    0.01233510398864746,
                    0.012315648078918457,
                    0.012302335739135742,
                    0.012302335739135742,
                    0.012321791648864745,
                    0.012318719863891601,
                    0.012307456016540527,
                    0.012322815895080566,
                    0.012316672325134278,
                    0.012352512359619141,
                    0.012310527801513671,
                    0.012315648078918457,
                    0.012308480262756348,
                    0.012328960418701173,
                    0.012311552047729492,
                    0.012301312446594239,
                    0.012307456016540527,
                    0.012311552047729492,
                    0.012301312446594239,
                    0.012306431770324706,
                    0.012306431770324706,
                    0.012323840141296387,
                    0.012323936462402344,
                    0.012309408187866211,
                    0.012303359985351562,
                    0.012295167922973632,
                    0.012333056449890138,
                    0.012307519912719727,
                    0.012306367874145508,
                    0.012320768356323243,
                    0.012299263954162597,
                    0.012304384231567383,
                    0.012317695617675782,
                    0.012321791648864745,
                    0.012974080085754394,
                    0.012348416328430176,
                    0.012345343589782716,
                    0.012336128234863282,
                    0.012318719863891601,
                    0.012305407524108887,
                    0.012307456016540527,
                    0.012304384231567383,
                    0.012302335739135742,
                    0.012310527801513671,
                    0.012320768356323243,
                    0.012316672325134278,
                    0.012308480262756348,
                    0.012310527801513671,
                    0.012303359985351562,
                    0.012323840141296387,
                    0.012309503555297852,
                    0.012314623832702636,
                    0.012310527801513671,
                    0.012315648078918457,
                    0.012303359985351562,
                    0.012314623832702636,
                    0.012314623832702636,
                    0.012316703796386719,
                    0.012520416259765625,
                    0.012552191734313965,
                    0.012437503814697265,
                    0.012316672325134278,
                    0.012305407524108887,
                    0.012308480262756348,
                    0.012294143676757812,
                    0.012296192169189453,
                    0.012307456016540527,
                    0.012339200019836426,
                    0.012307456016540527,
                    0.012300288200378418,
                    0.012292096138000488,
                    0.012304384231567383,
                    0.012304384231567383,
                    0.012305407524108887,
                    0.012315648078918457,
                    0.012306431770324706,
                    0.012465151786804199,
                    0.01235148811340332,
                    0.012320768356323243,
                    0.012302335739135742,
                    0.012310527801513671,
                    0.012313599586486817,
                    0.012303359985351562,
                    0.012300288200378418,
                    0.012295167922973632,
                    0.012305407524108887,
                    0.012311552047729492,
                    0.012286975860595703,
                    0.01244876766204834,
                    0.012371968269348145,
                    0.012324864387512208,
                    0.012306431770324706,
                    0.012321791648864745,
                    0.012305407524108887,
                    0.012286975860595703,
                    0.012295167922973632,
                    0.012306431770324706,
                    0.012309503555297852,
                    0.012303359985351562,
                    0.012305407524108887,
                    0.012302335739135742,
                    0.012303359985351562,
                    0.012323840141296387,
                    0.012321791648864745,
                    0.012297216415405274,
                    0.012307456016540527,
                    0.012291071891784668,
                    0.012301312446594239,
                    0.012294143676757812,
                    0.012313599586486817,
                    0.012296192169189453,
                    0.012313599586486817,
                    0.012319744110107422,
                    0.012297216415405274,
                    0.012295167922973632,
                    0.01234124755859375,
                    0.012399616241455079,
                    0.012984319686889649,
                    0.012328960418701173,
                    0.012393471717834472,
                    0.012314623832702636,
                    0.012323840141296387,
                    0.01231158447265625,
                    0.012303327560424805,
                    0.012297216415405274,
                    0.012323840141296387,
                    0.012327936172485352,
                    0.01233510398864746,
                    0.012327936172485352,
                    0.012324864387512208,
                    0.012403712272644044,
                    0.012308480262756348,
                    0.012319744110107422,
                    0.012315648078918457,
                    0.012331007957458496,
                    0.012412927627563476,
                    0.01233510398864746,
                    0.01233510398864746,
                    0.012323840141296387,
                    0.012389375686645507,
                    0.012306431770324706,
                    0.012305407524108887,
                    0.012310527801513671,
                    0.012472319602966308,
                    0.012319744110107422,
                    0.012309503555297852,
                    0.012308480262756348,
                    0.012316672325134278,
                    0.012301312446594239,
                    0.012326911926269531,
                    0.012322815895080566,
                    0.012304384231567383,
                    0.012309503555297852,
                    0.012312576293945313,
                    0.012304384231567383,
                    0.012301312446594239,
                    0.012361727714538574,
                    0.012352512359619141,
                    0.012365823745727538,
                    0.012305407524108887,
                    0.012309503555297852,
                    0.012304384231567383,
                    0.012320768356323243,
                    0.012453887939453125,
                    0.012313599586486817,
                    0.012303359985351562,
                    0.012304384231567383,
                    0.012309503555297852,
                    0.012307456016540527,
                    0.012324864387512208,
                    0.012317695617675782,
                    0.012310527801513671,
                    0.012303359985351562,
                    0.012301312446594239,
                    0.012327936172485352,
                    0.012326911926269531,
                    0.012310527801513671,
                    0.012308480262756348,
                    0.012348416328430176,
                    0.012312576293945313,
                    0.012311552047729492,
                    0.012313599586486817,
                    0.012317695617675782,
                    0.012323840141296387,
                    0.012304384231567383,
                    0.012308480262756348,
                    0.012305407524108887,
                    0.01233510398864746,
                    0.012319744110107422,
                    0.012298239707946777,
                    0.012309503555297852,
                    0.012463104248046876,
                    0.012669952392578124,
                    0.012488703727722168
                ]
            },
            "throughput": {
                "unit": "tokens/s",
                "value": 81.08009508072512
            },
            "energy": null,
            "efficiency": null
        }
    }
}
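
Note on the derived metrics: each throughput value in the report follows directly from the latency statistics and the scenario settings (batch_size=1, sequence_length=7, max_new_tokens=128). Below is a minimal sketch that reproduces them, assuming the JSON above is saved as "benchmark_report.json" (a hypothetical filename; the formulas are inferred from the numbers in this report, not quoted from the optimum-benchmark source).

import json

with open("benchmark_report.json") as f:
    data = json.load(f)

report = data["report"]
scenario = data["config"]["scenario"]
batch_size = scenario["input_shapes"]["batch_size"]         # 1
prompt_len = scenario["input_shapes"]["sequence_length"]    # 7
new_tokens = scenario["generate_kwargs"]["max_new_tokens"]  # 128

# Prefill throughput: prompt tokens processed per second.
prefill = batch_size * prompt_len / report["prefill"]["latency"]["mean"]

# Decode throughput: generated tokens per second; the first new token is
# attributed to the prefill step, hence new_tokens - 1.
decode = batch_size * (new_tokens - 1) / report["decode"]["latency"]["mean"]

# Per-token throughput: inverse of the mean per-token decode latency.
per_token = 1.0 / report["per_token"]["latency"]["mean"]

print(f"prefill:   {prefill:.2f} tokens/s")    # ~493.70 (reported: 493.696...)
print(f"decode:    {decode:.2f} tokens/s")     # ~81.07  (reported: 81.072...)
print(f"per_token: {per_token:.2f} tokens/s")  # ~81.08  (reported: 81.080...)

Consistent with the decode formula, per_token.latency.count is 254, i.e. 2 iterations x 127 decode steps.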