{
    "config": {
        "name": "pytorch_generate",
        "backend": {
            "name": "pytorch",
            "version": "2.4.0+cu121",
            "_target_": "optimum_benchmark.backends.pytorch.backend.PyTorchBackend",
            "task": "text-generation",
            "library": "transformers",
            "model_type": "gemma",
            "model": "google/gemma-2b",
            "processor": "google/gemma-2b",
            "device": "cuda",
            "device_ids": "0",
            "seed": 42,
            "inter_op_num_threads": null,
            "intra_op_num_threads": null,
            "model_kwargs": {},
            "processor_kwargs": {},
            "no_weights": true,
            "device_map": null,
            "torch_dtype": "float16",
            "eval_mode": true,
            "to_bettertransformer": false,
            "low_cpu_mem_usage": null,
            "attn_implementation": null,
            "cache_implementation": "static",
            "autocast_enabled": false,
            "autocast_dtype": null,
            "torch_compile": true,
            "torch_compile_target": "forward",
            "torch_compile_config": {
                "backend": "inductor",
                "mode": "reduce-overhead",
                "fullgraph": true
            },
            "quantization_scheme": null,
            "quantization_config": {},
            "deepspeed_inference": false,
            "deepspeed_inference_config": {},
            "peft_type": null,
            "peft_config": {}
        },
        "scenario": {
            "name": "inference",
            "_target_": "optimum_benchmark.scenarios.inference.scenario.InferenceScenario",
            "iterations": 2,
            "duration": 0,
            "warmup_runs": 10,
            "input_shapes": {
                "batch_size": 1,
                "num_choices": 2,
                "sequence_length": 7
            },
            "new_tokens": null,
            "memory": true,
            "latency": true,
            "energy": false,
            "forward_kwargs": {},
            "generate_kwargs": {
                "max_new_tokens": 128,
                "min_new_tokens": 128,
                "do_sample": false
            },
            "call_kwargs": {}
        },
        "launcher": {
            "name": "process",
            "_target_": "optimum_benchmark.launchers.process.launcher.ProcessLauncher",
            "device_isolation": true,
            "device_isolation_action": "warn",
            "numactl": false,
            "numactl_kwargs": {},
            "start_method": "spawn"
        },
        "environment": {
            "cpu": " AMD EPYC 7R32",
            "cpu_count": 16,
            "cpu_ram_mb": 66697.261056,
            "system": "Linux",
            "machine": "x86_64",
            "platform": "Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.29",
            "processor": "x86_64",
            "python_version": "3.8.10",
            "gpu": [
                "NVIDIA A10G"
            ],
            "gpu_count": 1,
            "gpu_vram_mb": 24146608128,
            "optimum_benchmark_version": "0.4.0",
            "optimum_benchmark_commit": null,
            "transformers_version": "4.45.0.dev0",
            "transformers_commit": "ecd61c62862f925a18b4f063dc17fcaf01826e25",
            "accelerate_version": "0.35.0.dev0",
            "accelerate_commit": null,
            "diffusers_version": null,
            "diffusers_commit": null,
            "optimum_version": "1.22.0.dev0",
            "optimum_commit": null,
            "timm_version": "0.9.16",
            "timm_commit": null,
            "peft_version": "0.12.1.dev0",
            "peft_commit": null
        }
    },
    "report": {
        "load": {
            "memory": {
                "unit": "MB",
                "max_ram": 1337.93792,
                "max_global_vram": 6768.033792,
                "max_process_vram": 0.0,
                "max_reserved": 6138.363904,
                "max_allocated": 6060.931072
            },
            "latency": {
                "unit": "s",
                "count": 1,
                "total": 12.7981103515625,
                "mean": 12.7981103515625,
                "stdev": 0.0,
                "p50": 12.7981103515625,
                "p90": 12.7981103515625,
                "p95": 12.7981103515625,
                "p99": 12.7981103515625,
                "values": [
                    12.7981103515625
                ]
            },
            "throughput": null,
            "energy": null,
            "efficiency": null
        },
        "prefill": {
            "memory": {
                "unit": "MB",
                "max_ram": 1858.17088,
                "max_global_vram": 6789.005312,
                "max_process_vram": 0.0,
                "max_reserved": 6138.363904,
                "max_allocated": 5278.596608
            },
            "latency": {
                "unit": "s",
                "count": 2,
                "total": 0.0284465274810791,
                "mean": 0.01422326374053955,
                "stdev": 0.0001012477874755861,
                "p50": 0.01422326374053955,
                "p90": 0.014304261970520019,
                "p95": 0.014314386749267579,
                "p99": 0.014322486572265624,
                "values": [
                    0.014122015953063964,
                    0.014324511528015137
                ]
            },
            "throughput": {
                "unit": "tokens/s",
                "value": 492.1514588841098
            },
            "energy": null,
            "efficiency": null
        },
        "decode": {
            "memory": {
                "unit": "MB",
                "max_ram": 2028.3392,
                "max_global_vram": 6071.779328,
                "max_process_vram": 0.0,
                "max_reserved": 5381.292032,
                "max_allocated": 5282.647552
            },
            "latency": {
                "unit": "s",
                "count": 2,
                "total": 3.1311047363281252,
                "mean": 1.5655523681640626,
                "stdev": 3.210449218749023e-05,
                "p50": 1.5655523681640626,
                "p90": 1.5655780517578126,
                "p95": 1.5655812622070313,
                "p99": 1.5655838305664063,
                "values": [
                    1.56558447265625,
                    1.565520263671875
                ]
            },
            "throughput": {
                "unit": "tokens/s",
                "value": 81.12152782786438
            },
            "energy": null,
            "efficiency": null
        },
        "per_token": {
            "memory": null,
            "latency": {
                "unit": "s",
                "count": 254,
                "total": 3.130817541122439,
                "mean": 0.012326053311505656,
                "stdev": 9.284325024015735e-05,
                "p50": 0.012304384231567383,
                "p90": 0.012332749176025391,
                "p95": 0.01236925401687622,
                "p99": 0.012838184852600099,
                "values": [
                    0.012834815979003907,
                    0.012775424003601075,
                    0.012320768356323243,
                    0.012308480262756348,
                    0.012299263954162597,
                    0.012303359985351562,
                    0.01253990364074707,
                    0.012324864387512208,
                    0.012292096138000488,
                    0.012303359985351562,
                    0.012305407524108887,
                    0.01229312038421631,
                    0.012300288200378418,
                    0.012298239707946777,
                    0.012304384231567383,
                    0.012317695617675782,
                    0.012294143676757812,
                    0.012299263954162597,
                    0.012299263954162597,
                    0.012328960418701173,
                    0.01229312038421631,
                    0.012296192169189453,
                    0.012305407524108887,
                    0.01229312038421631,
                    0.012296192169189453,
                    0.012294143676757812,
                    0.012302335739135742,
                    0.012310527801513671,
                    0.012303359985351562,
                    0.012301312446594239,
                    0.012295167922973632,
                    0.012304384231567383,
                    0.012320768356323243,
                    0.012314623832702636,
                    0.012307456016540527,
                    0.01229312038421631,
                    0.012296192169189453,
                    0.012314623832702636,
                    0.012309503555297852,
                    0.012312576293945313,
                    0.012301312446594239,
                    0.012306431770324706,
                    0.012310527801513671,
                    0.012301312446594239,
                    0.012315648078918457,
                    0.012303359985351562,
                    0.012309503555297852,
                    0.012298239707946777,
                    0.012310527801513671,
                    0.012295167922973632,
                    0.012297216415405274,
                    0.012297216415405274,
                    0.012295167922973632,
                    0.012302335739135742,
                    0.012303359985351562,
                    0.012528639793395996,
                    0.012368895530700684,
                    0.01233510398864746,
                    0.01234124755859375,
                    0.012323840141296387,
                    0.012303359985351562,
                    0.012298239707946777,
                    0.012295167922973632,
                    0.012310527801513671,
                    0.012305407524108887,
                    0.012303359985351562,
                    0.012301312446594239,
                    0.012302335739135742,
                    0.012299263954162597,
                    0.012317695617675782,
                    0.012354559898376465,
                    0.012322815895080566,
                    0.012307456016540527,
                    0.012304384231567383,
                    0.012297216415405274,
                    0.012303359985351562,
                    0.012298239707946777,
                    0.012303359985351562,
                    0.012300288200378418,
                    0.012356608390808106,
                    0.012369919776916503,
                    0.012315648078918457,
                    0.012318719863891601,
                    0.012327936172485352,
                    0.012820480346679687,
                    0.012732416152954102,
                    0.012315648078918457,
                    0.012312576293945313,
                    0.012307456016540527,
                    0.012299263954162597,
                    0.012296192169189453,
                    0.012308480262756348,
                    0.012311552047729492,
                    0.012321791648864745,
                    0.012324864387512208,
                    0.012300288200378418,
                    0.012300288200378418,
                    0.012313599586486817,
                    0.012294143676757812,
                    0.012303359985351562,
                    0.012296192169189453,
                    0.012304384231567383,
                    0.012300288200378418,
                    0.012296192169189453,
                    0.012309503555297852,
                    0.012306431770324706,
                    0.012323840141296387,
                    0.012297216415405274,
                    0.012310527801513671,
                    0.012299263954162597,
                    0.012301312446594239,
                    0.012301312446594239,
                    0.012311552047729492,
                    0.012304384231567383,
                    0.012301312446594239,
                    0.012309503555297852,
                    0.012302335739135742,
                    0.012306431770324706,
                    0.012314623832702636,
                    0.012311552047729492,
                    0.012302335739135742,
                    0.012302335739135742,
                    0.012292096138000488,
                    0.01229312038421631,
                    0.012307456016540527,
                    0.012304384231567383,
                    0.012302335739135742,
                    0.012340224266052247,
                    0.012309503555297852,
                    0.012296192169189453,
                    0.012296192169189453,
                    0.012298239707946777,
                    0.012300288200378418,
                    0.012302335739135742,
                    0.012312576293945313,
                    0.012317695617675782,
                    0.012303359985351562,
                    0.012332032203674317,
                    0.012297216415405274,
                    0.012299263954162597,
                    0.012285951614379884,
                    0.012299263954162597,
                    0.012297216415405274,
                    0.012295167922973632,
                    0.012321791648864745,
                    0.012323840141296387,
                    0.012301312446594239,
                    0.012294143676757812,
                    0.012307456016540527,
                    0.012317695617675782,
                    0.012317695617675782,
                    0.01236787223815918,
                    0.012297216415405274,
                    0.012304384231567383,
                    0.012300288200378418,
                    0.012304384231567383,
                    0.012321791648864745,
                    0.012304384231567383,
                    0.012303359985351562,
                    0.012302335739135742,
                    0.012283904075622559,
                    0.012306431770324706,
                    0.012314623832702636,
                    0.012333056449890138,
                    0.012305407524108887,
                    0.012328960418701173,
                    0.012296192169189453,
                    0.012528639793395996,
                    0.012841983795166016,
                    0.012322815895080566,
                    0.012310527801513671,
                    0.012315648078918457,
                    0.012328960418701173,
                    0.012324864387512208,
                    0.012320768356323243,
                    0.012302335739135742,
                    0.012290047645568849,
                    0.012298239707946777,
                    0.012297216415405274,
                    0.012303359985351562,
                    0.012286975860595703,
                    0.012294143676757812,
                    0.012303359985351562,
                    0.012291168212890625,
                    0.012306336402893067,
                    0.012302335739135742,
                    0.012328960418701173,
                    0.012313599586486817,
                    0.012298239707946777,
                    0.012297216415405274,
                    0.012292096138000488,
                    0.012306431770324706,
                    0.012299263954162597,
                    0.012311552047729492,
                    0.012290047645568849,
                    0.012296192169189453,
                    0.012292096138000488,
                    0.01229312038421631,
                    0.012312576293945313,
                    0.012327936172485352,
                    0.012315648078918457,
                    0.012301312446594239,
                    0.012304384231567383,
                    0.012299263954162597,
                    0.01229312038421631,
                    0.01229312038421631,
                    0.012292096138000488,
                    0.01229312038421631,
                    0.012294143676757812,
                    0.012296192169189453,
                    0.012296256065368652,
                    0.012306367874145508,
                    0.012311552047729492,
                    0.012312576293945313,
                    0.012303359985351562,
                    0.012305407524108887,
                    0.01234227180480957,
                    0.01232588768005371,
                    0.012304384231567383,
                    0.012304384231567383,
                    0.012299263954162597,
                    0.012289024353027344,
                    0.012300288200378418,
                    0.012294143676757812,
                    0.012305407524108887,
                    0.01235148811340332,
                    0.012319744110107422,
                    0.012298239707946777,
                    0.012304384231567383,
                    0.012295167922973632,
                    0.012295167922973632,
                    0.012300288200378418,
                    0.012371968269348145,
                    0.012294143676757812,
                    0.012307456016540527,
                    0.012916735649108887,
                    0.01264844799041748,
                    0.012360704421997071,
                    0.012340224266052247,
                    0.012296192169189453,
                    0.012301312446594239,
                    0.012306431770324706,
                    0.012311552047729492,
                    0.012303359985351562,
                    0.012296192169189453,
                    0.012301312446594239,
                    0.012296192169189453,
                    0.012304384231567383,
                    0.012338175773620605,
                    0.012311552047729492,
                    0.012309503555297852,
                    0.012307456016540527,
                    0.012904447555541992,
                    0.012327936172485352
                ]
            },
            "throughput": {
                "unit": "tokens/s",
                "value": 81.12896924326604
            },
            "energy": null,
            "efficiency": null
        }
    }
}